Mirror of https://github.com/unraid/api.git (synced 2026-01-02 14:40:01 -06:00)
Compare commits: 4.16.0-bui...v4.22.2 (97 commits)
Commits in this range (SHA1):

45d9d65c13, 771014b005, 31a255c928, 167857a323, b80988aaab, fe4a6451f1, 9a86c615da, 25ff8992a5,
45fb53d040, c855caa9b2, ba4a43aec8, c4ca761dfc, 01d353fa08, 4a07953457, 0b20e3ea9f, 3f4af09db5,
222ced7518, 03dae7ce66, 0990b898bd, 95faeaa2f3, b49ef5a762, c782cf0e87, f95ca9c9cb, a59b363ebc,
2fef10c94a, 1c73a4af42, 88a924c84f, ae4d3ecbc4, c569043ab5, 50ea2a3ffb, b518131406, e57d81e073,
88baddd6c0, abc22bdb87, 6ed2f5ce8e, b79b44e95c, ca22285a26, 838be2c52e, 73c1100d0b, 434e331384,
a27453fda8, 98e6058cd8, 6c2c51ae1d, d10c12035e, 5dd6f42550, 4759b3d0b3, daeeba8c1f, 196bd52628,
6c0061923a, f33afe7ae5, aecf70ffad, 785f1f5eb1, 193be3df36, 116ee88fcf, 413db4bd30, 095c2221c9,
dfe891ce38, 797bf50ec7, af5ca11860, f0cffbdc7a, 16905dd3a6, 2ecdb99052, 286f1be8ed, bcefdd5261,
d3459ecbc6, 534a07788b, 239cdd6133, 77cfc07dda, 728b38ac11, 44774d0acd, e204eb80a0, 0c727c37f4,
292bc0fc81, 53f501e1a7, 6cf7c88242, 33774aa596, 88087d5201, 5d89682a3f, bc15bd3d70, 7c3aee8f3f,
c7c3bb57ea, 99dbad57d5, c42f79d406, 4d8588b173, 0d1d27064e, 0fe2c2c1c8, a8e4119270, 372a4ebb42,
4e945f5f56, 6356f9c41d, a1ee915ca5, c147a6b507, 9d42b36f74, 26a95af953, 0ead267838, 163763f9e5,
6469d002b7
```diff
@@ -1,123 +1,3 @@
 {
-  "permissions": {
-    "allow": [
-      "# Development Commands",
-      "Bash(pnpm install)",
-      "Bash(pnpm dev)",
-      "Bash(pnpm build)",
-      "Bash(pnpm test)",
-      "Bash(pnpm test:*)",
-      "Bash(pnpm lint)",
-      "Bash(pnpm lint:fix)",
-      "Bash(pnpm type-check)",
-      "Bash(pnpm codegen)",
-      "Bash(pnpm storybook)",
-      "Bash(pnpm --filter * dev)",
-      "Bash(pnpm --filter * build)",
-      "Bash(pnpm --filter * test)",
-      "Bash(pnpm --filter * lint)",
-      "Bash(pnpm --filter * codegen)",
-
-      "# Git Commands (read-only)",
-      "Bash(git status)",
-      "Bash(git diff)",
-      "Bash(git log)",
-      "Bash(git branch)",
-      "Bash(git remote -v)",
-
-      "# Search Commands",
-      "Bash(rg *)",
-
-      "# File System (read-only)",
-      "Bash(ls)",
-      "Bash(ls -la)",
-      "Bash(pwd)",
-      "Bash(find . -name)",
-      "Bash(find . -type)",
-
-      "# Node/NPM Commands",
-      "Bash(node --version)",
-      "Bash(pnpm --version)",
-      "Bash(npx --version)",
-
-      "# Environment Commands",
-      "Bash(echo $*)",
-      "Bash(which *)",
-
-      "# Process Commands",
-      "Bash(ps aux | grep)",
-      "Bash(lsof -i)",
-
-      "# Documentation Domains",
-      "WebFetch(domain:tailwindcss.com)",
-      "WebFetch(domain:github.com)",
-      "WebFetch(domain:reka-ui.com)",
-      "WebFetch(domain:nodejs.org)",
-      "WebFetch(domain:pnpm.io)",
-      "WebFetch(domain:vitejs.dev)",
-      "WebFetch(domain:nuxt.com)",
-      "WebFetch(domain:nestjs.com)",
-
-      "# IDE Integration",
-      "mcp__ide__getDiagnostics",
-
-      "# Browser MCP (for testing)",
-      "mcp__browsermcp__browser_navigate",
-      "mcp__browsermcp__browser_click",
-      "mcp__browsermcp__browser_screenshot"
-    ],
-    "deny": [
-      "# Dangerous Commands",
-      "Bash(rm -rf)",
-      "Bash(chmod 777)",
-      "Bash(curl)",
-      "Bash(wget)",
-      "Bash(ssh)",
-      "Bash(scp)",
-      "Bash(sudo)",
-      "Bash(su)",
-      "Bash(pkill)",
-      "Bash(kill)",
-      "Bash(killall)",
-      "Bash(python)",
-      "Bash(python3)",
-      "Bash(pip)",
-      "Bash(npm)",
-      "Bash(yarn)",
-      "Bash(apt)",
-      "Bash(brew)",
-      "Bash(systemctl)",
-      "Bash(service)",
-      "Bash(docker)",
-      "Bash(docker-compose)",
-
-      "# File Modification (use Edit/Write tools instead)",
-      "Bash(sed)",
-      "Bash(awk)",
-      "Bash(perl)",
-      "Bash(echo > *)",
-      "Bash(echo >> *)",
-      "Bash(cat > *)",
-      "Bash(cat >> *)",
-      "Bash(tee)",
-
-      "# Git Write Commands (require explicit user action)",
-      "Bash(git add)",
-      "Bash(git commit)",
-      "Bash(git push)",
-      "Bash(git pull)",
-      "Bash(git merge)",
-      "Bash(git rebase)",
-      "Bash(git checkout)",
-      "Bash(git reset)",
-      "Bash(git clean)",
-
-      "# Package Management Write Commands",
-      "Bash(pnpm add)",
-      "Bash(pnpm remove)",
-      "Bash(pnpm update)",
-      "Bash(pnpm upgrade)"
-    ]
-  },
-  "enableAllProjectMcpServers": false
+  "permissions": {}
 }
```
.github/workflows/build-plugin.yml (68 lines changed):

```diff
@@ -36,6 +36,8 @@ on:
       required: true
     CF_ENDPOINT:
       required: true
+    UNRAID_BOT_GITHUB_ADMIN_TOKEN:
+      required: false
 jobs:
   build-plugin:
     name: Build and Deploy Plugin
@@ -49,21 +51,16 @@ jobs:
         with:
           fetch-depth: 0

-      - name: Install Node
-        uses: actions/setup-node@v4
-        with:
-          node-version-file: ".nvmrc"
-
       - uses: pnpm/action-setup@v4
         name: Install pnpm
         with:
           run_install: false

-      - name: Get pnpm store directory
-        id: pnpm-cache
-        shell: bash
-        run: |
-          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
+      - name: Install Node
+        uses: actions/setup-node@v5
+        with:
+          node-version-file: ".nvmrc"
+          cache: 'pnpm'

      - name: Get API Version
        id: vars
@@ -74,14 +71,6 @@ jobs:
          API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
          echo "API_VERSION=${API_VERSION}" >> $GITHUB_OUTPUT

-      - uses: actions/cache@v4
-        name: Setup pnpm cache
-        with:
-          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
-          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-store-
-
      - name: Install dependencies
        run: |
          cd ${{ github.workspace }}
```
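The `API_VERSION` expression in the "Get API Version" hunk above appends the short commit SHA as SemVer-style build metadata whenever the build is not from a release tag. A minimal sketch of the same conditional, with illustrative values (the workflow derives them from the repo and git state):

```bash
#!/usr/bin/env bash
# Sketch of the version-suffix logic from the "Get API Version" step above.
PACKAGE_LOCK_VERSION="4.22.2"   # example value; the workflow reads the real one
GIT_SHA="abc1234"               # example short SHA
IS_TAGGED=""                    # empty when the commit carries no release tag

API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
echo "$API_VERSION"             # prints "4.22.2+abc1234" for untagged builds
```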
````diff
@@ -97,7 +86,7 @@ jobs:
        uses: actions/download-artifact@v5
        with:
          pattern: unraid-wc-rich
-          path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/nuxt
+          path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/standalone
          merge-multiple: true
      - name: Download Unraid API
        uses: actions/download-artifact@v5
@@ -151,8 +140,8 @@ jobs:
        uses: the-actions-org/workflow-dispatch@v4.0.0
        with:
          workflow: release-production.yml
-          inputs: '{ "version": "${{ steps.vars.outputs.API_VERSION }}" }'
-          token: ${{ secrets.WORKFLOW_TRIGGER_PAT }}
+          inputs: '{ "version": "v${{ steps.vars.outputs.API_VERSION }}" }'
+          token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

      - name: Upload to Cloudflare
        if: inputs.RELEASE_CREATED == 'false'
@@ -181,3 +170,40 @@ jobs:
          ```
          ${{ inputs.BASE_URL }}/tag/${{ inputs.TAG }}/dynamix.unraid.net.plg
          ```
+
+      - name: Clean up old preview builds
+        if: inputs.RELEASE_CREATED == 'false' && github.event_name == 'push'
+        continue-on-error: true
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
+          AWS_DEFAULT_REGION: auto
+        run: |
+          echo "🧹 Cleaning up old preview builds (keeping last 7 days)..."
+
+          # Calculate cutoff date (7 days ago)
+          CUTOFF_DATE=$(date -d "7 days ago" +"%Y.%m.%d")
+          echo "Deleting builds older than: ${CUTOFF_DATE}"
+
+          # List and delete old timestamped .txz files
+          OLD_FILES=$(aws s3 ls "s3://${{ secrets.CF_BUCKET_PREVIEW }}/unraid-api/" \
+            --endpoint-url ${{ secrets.CF_ENDPOINT }} --recursive | \
+            grep -E "dynamix\.unraid\.net-[0-9]{4}\.[0-9]{2}\.[0-9]{2}\.[0-9]{4}\.txz" | \
+            awk '{print $4}' || true)
+
+          DELETED_COUNT=0
+          if [ -n "$OLD_FILES" ]; then
+            while IFS= read -r file; do
+              if [[ $file =~ ([0-9]{4}\.[0-9]{2}\.[0-9]{2})\.[0-9]{4}\.txz ]]; then
+                FILE_DATE="${BASH_REMATCH[1]}"
+                if [[ "$FILE_DATE" < "$CUTOFF_DATE" ]]; then
+                  echo "Deleting old build: $(basename "$file")"
+                  aws s3 rm "s3://${{ secrets.CF_BUCKET_PREVIEW }}/${file}" \
+                    --endpoint-url ${{ secrets.CF_ENDPOINT }} || true
+                  ((DELETED_COUNT++))
+                fi
+              fi
+            done <<< "$OLD_FILES"
+          fi
+
+          echo "✅ Deleted ${DELETED_COUNT} old builds"
````
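The cleanup step above compares dates as plain strings (`[[ "$FILE_DATE" < "$CUTOFF_DATE" ]]`). That is only safe because the `%Y.%m.%d` format is zero-padded and fixed-width, so lexicographic order coincides with chronological order. A quick standalone illustration:

```bash
#!/usr/bin/env bash
# Lexicographic string comparison agrees with chronological order
# for zero-padded YYYY.MM.DD stamps like the ones in the cleanup step.
CUTOFF_DATE=$(date -d "7 days ago" +"%Y.%m.%d")   # GNU date, as in the workflow
FILE_DATE="2024.12.01"                            # example stamp parsed from a .txz name

if [[ "$FILE_DATE" < "$CUTOFF_DATE" ]]; then
  echo "older than cutoff -> would be deleted"
fi
```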
.github/workflows/deploy-storybook.yml (13 lines changed):

```diff
@@ -22,16 +22,17 @@ jobs:
      - name: Checkout
        uses: actions/checkout@v5

-      - name: Setup Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: '22.18.0'
-
      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

+      - name: Setup Node.js
+        uses: actions/setup-node@v5
+        with:
+          node-version-file: ".nvmrc"
+          cache: 'pnpm'
+
      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
@@ -65,7 +66,7 @@ jobs:

      - name: Comment PR with deployment URL
        if: github.event_name == 'pull_request'
-        uses: actions/github-script@v7
+        uses: actions/github-script@v8
        with:
          script: |
            github.rest.issues.createComment({
```
.github/workflows/main.yml (193 lines changed):

```diff
@@ -6,29 +6,15 @@ on:
   branches:
     - main

-permissions:
-  contents: write
-  pull-requests: write
-
 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
+  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

 jobs:
-  release-please:
-    name: Release Please
-    runs-on: ubuntu-latest
-    permissions:
-      contents: write
-      pull-requests: write
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v5
-        # Only run release-please on pushes to main
-        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
-
-      - id: release
-        uses: googleapis/release-please-action@v4
-        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
-    outputs:
-      releases_created: ${{ steps.release.outputs.releases_created || 'false' }}
-      tag_name: ${{ steps.release.outputs.tag_name || '' }}
   test-api:
     name: Test API
     defaults:
@@ -38,36 +24,25 @@ jobs:
     steps:
       - name: Checkout repo
         uses: actions/checkout@v5
+        with:
+          fetch-depth: 0

-      - name: Install Node
-        uses: actions/setup-node@v4
-        with:
-          node-version-file: ".nvmrc"
-
-      - name: Cache APT Packages
-        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
-        with:
-          packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
-          version: 1.0
-
       - name: Install pnpm
         uses: pnpm/action-setup@v4
         with:
           run_install: false

-      - name: Get pnpm store directory
-        id: pnpm-cache
-        shell: bash
-        run: |
-          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
-
-      - uses: actions/cache@v4
-        name: Setup pnpm cache
-        with:
-          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
-          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-store-
+      - name: Install Node
+        uses: actions/setup-node@v5
+        with:
+          node-version-file: ".nvmrc"
+          cache: 'pnpm'
+
+      - name: Cache APT Packages
+        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
+        with:
+          packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system php-cli
+          version: 1.0

       - name: PNPM Install
         run: pnpm install --frozen-lockfile
@@ -117,42 +92,68 @@ jobs:
           # Verify libvirt is running using sudo to bypass group membership delays
           sudo virsh list --all || true

-      - uses: oven-sh/setup-bun@v2
-        with:
-          bun-version: latest
+      - name: Build UI Package First
+        run: |
+          echo "🔧 Building UI package for web tests dependency..."
+          cd ../unraid-ui && pnpm run build

       - name: Run Tests Concurrently
         run: |
           set -e

-          # Run all tests in parallel with labeled output
+          # Run all tests in parallel with labeled output and coverage generation
           echo "🚀 Starting API coverage tests..."
           pnpm run coverage > api-test.log 2>&1 &
           API_PID=$!

           echo "🚀 Starting Connect plugin tests..."
-          (cd ../packages/unraid-api-plugin-connect && pnpm test) > connect-test.log 2>&1 &
+          (cd ../packages/unraid-api-plugin-connect && pnpm test --coverage 2>/dev/null || pnpm test) > connect-test.log 2>&1 &
           CONNECT_PID=$!

           echo "🚀 Starting Shared package tests..."
-          (cd ../packages/unraid-shared && pnpm test) > shared-test.log 2>&1 &
+          (cd ../packages/unraid-shared && pnpm test --coverage 2>/dev/null || pnpm test) > shared-test.log 2>&1 &
           SHARED_PID=$!

+          echo "🚀 Starting Web package coverage tests..."
+          (cd ../web && (pnpm test --coverage || pnpm test)) > web-test.log 2>&1 &
+          WEB_PID=$!
+
+          echo "🚀 Starting UI package coverage tests..."
+          (cd ../unraid-ui && pnpm test --coverage 2>/dev/null || pnpm test) > ui-test.log 2>&1 &
+          UI_PID=$!
+
+          echo "🚀 Starting Plugin tests..."
+          (cd ../plugin && pnpm test) > plugin-test.log 2>&1 &
+          PLUGIN_PID=$!
+
           # Wait for all processes and capture exit codes
           wait $API_PID && echo "✅ API tests completed" || { echo "❌ API tests failed"; API_EXIT=1; }
           wait $CONNECT_PID && echo "✅ Connect tests completed" || { echo "❌ Connect tests failed"; CONNECT_EXIT=1; }
           wait $SHARED_PID && echo "✅ Shared tests completed" || { echo "❌ Shared tests failed"; SHARED_EXIT=1; }
+          wait $WEB_PID && echo "✅ Web tests completed" || { echo "❌ Web tests failed"; WEB_EXIT=1; }
+          wait $UI_PID && echo "✅ UI tests completed" || { echo "❌ UI tests failed"; UI_EXIT=1; }
+          wait $PLUGIN_PID && echo "✅ Plugin tests completed" || { echo "❌ Plugin tests failed"; PLUGIN_EXIT=1; }

           # Display all outputs
           echo "📋 API Test Results:" && cat api-test.log
           echo "📋 Connect Plugin Test Results:" && cat connect-test.log
           echo "📋 Shared Package Test Results:" && cat shared-test.log
+          echo "📋 Web Package Test Results:" && cat web-test.log
+          echo "📋 UI Package Test Results:" && cat ui-test.log
+          echo "📋 Plugin Test Results:" && cat plugin-test.log

           # Exit with error if any test failed
-          if [[ ${API_EXIT:-0} -eq 1 || ${CONNECT_EXIT:-0} -eq 1 || ${SHARED_EXIT:-0} -eq 1 ]]; then
+          if [[ ${API_EXIT:-0} -eq 1 || ${CONNECT_EXIT:-0} -eq 1 || ${SHARED_EXIT:-0} -eq 1 || ${WEB_EXIT:-0} -eq 1 || ${UI_EXIT:-0} -eq 1 || ${PLUGIN_EXIT:-0} -eq 1 ]]; then
             exit 1
           fi

+      - name: Upload all coverage reports to Codecov
+        uses: codecov/codecov-action@v5
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: ./coverage/coverage-final.json,../web/coverage/coverage-final.json,../unraid-ui/coverage/coverage-final.json,../packages/unraid-api-plugin-connect/coverage/coverage-final.json,../packages/unraid-shared/coverage/coverage-final.json
+          fail_ci_if_error: false

   build-api:
     name: Build API
     runs-on: ubuntu-latest
@@ -165,29 +166,16 @@ jobs:
       - name: Checkout repo
         uses: actions/checkout@v5

-      - name: Install Node
-        uses: actions/setup-node@v4
-        with:
-          node-version-file: ".nvmrc"
-
       - uses: pnpm/action-setup@v4
         name: Install pnpm
         with:
           run_install: false

-      - name: Get pnpm store directory
-        id: pnpm-cache
-        shell: bash
-        run: |
-          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
-
-      - uses: actions/cache@v4
-        name: Setup pnpm cache
-        with:
-          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
-          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-store-
+      - name: Install Node
+        uses: actions/setup-node@v5
+        with:
+          node-version-file: ".nvmrc"
+          cache: 'pnpm'

       - name: Cache APT Packages
         uses: awalsh128/cache-apt-pkgs-action@v1.5.3
@@ -218,7 +206,7 @@ jobs:
         id: buildnumber
         uses: onyxmueller/build-tag-number@v1
         with:
-          token: ${{secrets.github_token}}
+          token: ${{secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN}}
           prefix: ${{steps.vars.outputs.PACKAGE_LOCK_VERSION}}

       - name: Build
@@ -242,29 +230,16 @@ jobs:
       - name: Checkout repo
         uses: actions/checkout@v5

-      - name: Install Node
-        uses: actions/setup-node@v4
-        with:
-          node-version-file: ".nvmrc"
-
       - uses: pnpm/action-setup@v4
         name: Install pnpm
         with:
           run_install: false

-      - name: Get pnpm store directory
-        id: pnpm-cache
-        shell: bash
-        run: |
-          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
-
-      - uses: actions/cache@v4
-        name: Setup pnpm cache
-        with:
-          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
-          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-store-
+      - name: Install Node
+        uses: actions/setup-node@v5
+        with:
+          node-version-file: ".nvmrc"
+          cache: 'pnpm'

       - name: Cache APT Packages
         uses: awalsh128/cache-apt-pkgs-action@v1.5.3
@@ -307,31 +282,17 @@ jobs:
           echo VITE_CONNECT=${{ secrets.VITE_CONNECT }} >> .env
           echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
           echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env
           cat .env

-      - name: Install Node
-        uses: actions/setup-node@v4
-        with:
-          node-version-file: ".nvmrc"
-
       - uses: pnpm/action-setup@v4
         name: Install pnpm
         with:
           run_install: false

-      - name: Get pnpm store directory
-        id: pnpm-cache
-        shell: bash
-        run: |
-          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
-
-      - uses: actions/cache@v4
-        name: Setup pnpm cache
-        with:
-          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
-          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-store-
+      - name: Install Node
+        uses: actions/setup-node@v5
+        with:
+          node-version-file: ".nvmrc"
+          cache: 'pnpm'

       - name: PNPM Install
         run: |
@@ -359,12 +320,34 @@ jobs:
         uses: actions/upload-artifact@v4
         with:
           name: unraid-wc-rich
-          path: web/.nuxt/nuxt-custom-elements/dist/unraid-components
+          path: web/dist

+  release-please:
+    name: Release Please
+    runs-on: ubuntu-latest
+    # Only run on pushes to main AND after tests pass
+    if: github.event_name == 'push' && github.ref == 'refs/heads/main'
+    needs:
+      - test-api
+      - build-api
+      - build-web
+      - build-unraid-ui-webcomponents
+    permissions:
+      contents: write
+      pull-requests: write
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v5
+
+      - id: release
+        uses: googleapis/release-please-action@v4
+    outputs:
+      releases_created: ${{ steps.release.outputs.releases_created || 'false' }}
+      tag_name: ${{ steps.release.outputs.tag_name || '' }}
+
   build-plugin-staging-pr:
     name: Build and Deploy Plugin
     needs:
       - release-please
       - build-api
       - build-web
       - build-unraid-ui-webcomponents
@@ -388,9 +371,6 @@ jobs:
     needs:
       - release-please
-      - build-api
-      - build-web
-      - build-unraid-ui-webcomponents
       - test-api
     uses: ./.github/workflows/build-plugin.yml
     with:
       RELEASE_CREATED: true
@@ -404,3 +384,4 @@ jobs:
       CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
       CF_BUCKET_PREVIEW: ${{ secrets.CF_BUCKET_PREVIEW }}
       CF_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
+      UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
```
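The "Run Tests Concurrently" step above fans six suites out as background jobs and reaps each with `wait "$PID"`. Because each failing `wait` sits inside a `&& ... || ...` list, `set -e` does not abort the script mid-reap; failures are collected into per-suite flags and checked once at the end. A stripped-down sketch of that pattern (the task names are placeholders):

```bash
#!/usr/bin/env bash
set -e

# Fan out: run two example tasks in the background, capturing each PID.
sleep 1 > a.log 2>&1 & A_PID=$!
false   > b.log 2>&1 & B_PID=$!

# Fan in: a `wait` inside an && / || list does not trip `set -e`,
# so every job gets reaped and failures are recorded as flags.
wait "$A_PID" && echo "✅ A ok" || A_EXIT=1
wait "$B_PID" && echo "✅ B ok" || B_EXIT=1

# Fail once, at the end, if any job failed.
if [[ ${A_EXIT:-0} -eq 1 || ${B_EXIT:-0} -eq 1 ]]; then
  exit 1
fi
```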
.github/workflows/push-staging-pr-on-close.yml (100 lines changed):

````diff
@@ -1,4 +1,9 @@
-name: Push Staging Plugin on PR Close
+name: Replace PR Plugin with Staging Redirect on Merge
+
+# This workflow runs when a PR is merged and replaces the PR-specific plugin
+# with a redirect version that points to the main staging URL.
+# This ensures users who installed the PR version will automatically
+# update to the staging version on their next update check.

 on:
   pull_request:
@@ -17,18 +22,13 @@ on:
       default: true

 jobs:
-  push-staging:
+  push-staging-redirect:
     if: (github.event_name == 'pull_request' && github.event.pull_request.merged == true) || (github.event_name == 'workflow_dispatch' && inputs.pr_merged == true)
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      actions: read
     steps:
-      - name: Set Timezone
-        uses: szenius/set-timezone@v2.0
-        with:
-          timezoneLinux: "America/Los_Angeles"
-
       - name: Set PR number
         id: pr_number
         run: |
@@ -45,11 +45,12 @@ jobs:
          name: unraid-plugin-.*
          path: connect-files
          pr: ${{ steps.pr_number.outputs.pr_number }}
+          workflow: main.yml
          workflow_conclusion: success
-          workflow_search: true
+          search_artifacts: true
          if_no_artifact_found: fail

-      - name: Update Downloaded Staging Plugin to New Date
+      - name: Update Downloaded Plugin to Redirect to Staging
        run: |
@@ -60,23 +61,82 @@ jobs:
          fi

          echo "Found plugin file: $plgfile"
-          version=$(date +"%Y.%m.%d.%H%M")
-          sed -i -E "s#(<!ENTITY version \").*(\">)#\1${version}\2#g" "${plgfile}" || exit 1
+
+          # Get current version and bump it with current timestamp
+          current_version=$(grep '<!ENTITY version' "${plgfile}" | sed -E 's/.*"(.*)".*/\1/')
+          echo "Current version: ${current_version}"
+
+          # Create new version with current timestamp (ensures it's newer)
+          new_version=$(date +"%Y.%m.%d.%H%M")
+          echo "New redirect version: ${new_version}"
+
+          # Update version to trigger update
+          sed -i -E "s#(<!ENTITY version \").*(\">)#\1${new_version}\2#g" "${plgfile}" || exit 1

-          # Change the plugin url to point to staging
+          # Change the plugin url to point to staging - users will switch to staging on next update
          url="https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg"
          sed -i -E "s#(<!ENTITY plugin_url \").*?(\">)#\1${url}\2#g" "${plgfile}" || exit 1
-          cat "${plgfile}"
+
+          echo "Modified plugin to redirect to: ${url}"
+          echo "Version bumped from ${current_version} to ${new_version}"

          mkdir -p pr-release
          mv "${plgfile}" pr-release/dynamix.unraid.net.plg

-      - name: Upload to Cloudflare
-        uses: jakejarvis/s3-sync-action@v0.5.1
+      - name: Clean up old PR artifacts from Cloudflare
        env:
-          AWS_S3_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
-          AWS_S3_BUCKET: ${{ secrets.CF_BUCKET_PREVIEW }}
          AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
-          AWS_REGION: "auto"
-          SOURCE_DIR: pr-release
-          DEST_DIR: unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}
+          AWS_DEFAULT_REGION: auto
+        run: |
+          # Delete all existing files in the PR directory first (txz, plg, etc.)
+          aws s3 rm s3://${{ secrets.CF_BUCKET_PREVIEW }}/unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}/ \
+            --recursive \
+            --endpoint-url ${{ secrets.CF_ENDPOINT }}
+
+          echo "✅ Cleaned up old PR artifacts"
+
+      - name: Upload PR Redirect Plugin to Cloudflare
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
+          AWS_DEFAULT_REGION: auto
+        run: |
+          # Upload only the redirect plugin file
+          aws s3 cp pr-release/dynamix.unraid.net.plg \
+            s3://${{ secrets.CF_BUCKET_PREVIEW }}/unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}/dynamix.unraid.net.plg \
+            --endpoint-url ${{ secrets.CF_ENDPOINT }} \
+            --content-encoding none \
+            --acl public-read
+
+          echo "✅ Uploaded redirect plugin"
+
+      - name: Output redirect information
+        run: |
+          echo "✅ PR plugin replaced with staging redirect version"
+          echo "PR URL remains: https://preview.dl.unraid.net/unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}/dynamix.unraid.net.plg"
+          echo "Redirects users to staging: https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg"
+          echo "Users updating from this PR version will automatically switch to staging"
+
+      - name: Comment on PR about staging redirect
+        if: github.event_name == 'pull_request'
+        uses: thollander/actions-comment-pull-request@v3
+        with:
+          comment-tag: pr-closed-staging
+          mode: recreate
+          message: |
+            ## 🔄 PR Merged - Plugin Redirected to Staging
+
+            This PR has been merged and the preview plugin has been updated to redirect to the staging version.
+
+            **For users testing this PR:**
+            - Your plugin will automatically update to the staging version on the next update check
+            - The staging version includes all merged changes from this PR
+            - No manual intervention required
+
+            **Staging URL:**
+            ```
+            https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg
+            ```
+
+            Thank you for testing! 🚀
````
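Both `sed` expressions in the workflow above rewrite `<!ENTITY ...>` declarations inside the `.plg` file (an XML-style plugin manifest) by capturing the text surrounding the quoted value and substituting only the value itself. A standalone illustration with a fabricated entity line:

```bash
#!/usr/bin/env bash
# Illustrative only: the entity line below is a made-up example of the
# <!ENTITY version "..."> declarations the workflow rewrites in the .plg file.
printf '<!ENTITY version "2024.01.01.0000">\n' > example.plg

new_version=$(date +"%Y.%m.%d.%H%M")
# GNU sed, as on the ubuntu-latest runners: groups \1 and \2 keep the markup,
# only the quoted value between them is replaced.
sed -i -E "s#(<!ENTITY version \").*(\">)#\1${new_version}\2#g" example.plg

cat example.plg   # -> <!ENTITY version "<current timestamp>">
```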
.github/workflows/release-production.yml (24 lines changed):

```diff
@@ -28,16 +28,21 @@ jobs:
        with:
          latest: true
          prerelease: false
-      - uses: actions/setup-node@v4
+      - uses: pnpm/action-setup@v4
+        name: Install pnpm
        with:
-          node-version: '22.18.0'
+          run_install: false
+      - uses: actions/setup-node@v5
+        with:
+          node-version-file: ".nvmrc"
+          cache: 'pnpm'
      - run: |
          cat << 'EOF' > release-notes.txt
          ${{ steps.release-info.outputs.body }}
          EOF
      - run: npm install html-escaper@2 xml2js
      - name: Update Plugin Changelog
-        uses: actions/github-script@v7
+        uses: actions/github-script@v8
        with:
          script: |
            const fs = require('fs');
@@ -124,3 +129,16 @@ jobs:
            --no-guess-mime-type \
            --content-encoding none \
            --acl public-read
+
+      - name: Actions for Discord
+        uses: Ilshidur/action-discord@0.4.0
+        env:
+          DISCORD_WEBHOOK: ${{ secrets.PUBLIC_DISCORD_RELEASE_ENDPOINT }}
+        with:
+          args: |
+            🚀 **Unraid API Release ${{ inputs.version }}**
+
+            View Release: https://github.com/${{ github.repository }}/releases/tag/${{ inputs.version }}
+
+            **Changelog:**
+            ${{ steps.release-info.outputs.body }}
```
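In the release-notes step above, the heredoc delimiter is quoted (`<< 'EOF'`), so the shell performs no expansion on the body; the `${{ ... }}` placeholder is already substituted by the Actions runner before bash ever sees the script. A small demonstration of the difference:

```bash
#!/usr/bin/env bash
GREETING="hello"

# Unquoted delimiter: the shell expands $GREETING.
cat << EOF
expanded: $GREETING
EOF

# Quoted delimiter (as in the workflow): the body is taken literally.
cat << 'EOF'
literal: $GREETING
EOF
```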
.github/workflows/test-libvirt.yml (file deleted, @@ -1,71 +0,0 @@). The removed file:

```yaml
name: Test Libvirt
on:
  push:
    branches:
      - main
    paths:
      - "libvirt/**"
  pull_request:
    paths:
      - "libvirt/**"

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  build:
    runs-on: ubuntu-latest

    defaults:
      run:
        working-directory: ./libvirt

    steps:
      - uses: actions/checkout@v5
        with:
          submodules: recursive

      - uses: actions/setup-python@v5
        with:
          python-version: "3.13.7"

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: libvirt-dev
          version: 1.0

      - name: Set Node.js
        uses: actions/setup-node@v4
        with:
          node-version-file: ".nvmrc"

      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10.15.0
          run_install: false

      - name: Get pnpm store directory
        id: pnpm-cache
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT

      - uses: actions/cache@v4
        name: Setup pnpm cache
        with:
          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('libvirt/package.json') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: pnpm install
        run: pnpm install --frozen-lockfile

      - name: Build
        run: pnpm run build

      - name: test
        run: pnpm run test
```
.gitignore (5 lines changed):

```diff
@@ -29,6 +29,10 @@ unraid-ui/node_modules/
 # TypeScript v1 declaration files
 typings/

+# Auto-generated type declarations for Nuxt UI
+auto-imports.d.ts
+components.d.ts
+
 # Optional npm cache directory
 .npm

@@ -118,3 +122,4 @@ api/dev/Unraid.net/myservers.cfg

 # local Mise settings
 .mise.toml
```
Release manifest:

```diff
@@ -1 +1 @@
-{".":"4.16.0"}
+{".":"4.22.2"}
```
```diff
@@ -1,7 +1,7 @@
 @custom-variant dark (&:where(.dark, .dark *));

 @layer utilities {
+  /* Utility defaults for web components (when we were using shadow DOM) */
   :host {
     --tw-divide-y-reverse: 0;
     --tw-border-style: solid;
     --tw-font-weight: initial;
@@ -48,21 +48,20 @@
     --tw-drop-shadow: initial;
     --tw-duration: initial;
     --tw-ease: initial;
   }
 }

 @layer base {
-  *,
-  ::after,
-  ::before,
-  ::backdrop,
-  ::file-selector-button {
-    border-color: hsl(var(--border));
-  }
+  /* Global border color - this is what's causing the issue! */
+  /* Commenting out since it affects all elements globally
+  *,
+  ::after,
+  ::before,
+  ::backdrop,
+  ::file-selector-button {
+    border-color: hsl(var(--border));
+  }
+  */

   body {
     --color-alpha: #1c1b1b;
     --color-beta: #f2f2f2;
     --color-gamma: #999999;
@@ -74,8 +73,24 @@
     --ring-shadow: 0 0 var(--color-beta);
   }

   button:not(:disabled),
   [role='button']:not(:disabled) {
     cursor: pointer;
   }

+  /* Font size overrides for SSO button component */
+  unraid-sso-button {
+    --text-xs: 0.75rem;
+    --text-sm: 0.875rem;
+    --text-base: 1rem;
+    --text-lg: 1.125rem;
+    --text-xl: 1.25rem;
+    --text-2xl: 1.5rem;
+    --text-3xl: 1.875rem;
+    --text-4xl: 2.25rem;
+    --text-5xl: 3rem;
+    --text-6xl: 3.75rem;
+    --text-7xl: 4.5rem;
+    --text-8xl: 6rem;
+    --text-9xl: 8rem;
+  }
```
```diff
@@ -1,7 +1,61 @@
 /* Hybrid theme system: Native CSS + Theme Store fallback */
 @layer base {
   /* Light mode defaults */
   :root {
+    /* Nuxt UI Color System - Primary (Orange for Unraid) */
+    --ui-color-primary-50: #fff7ed;
+    --ui-color-primary-100: #ffedd5;
+    --ui-color-primary-200: #fed7aa;
+    --ui-color-primary-300: #fdba74;
+    --ui-color-primary-400: #fb923c;
+    --ui-color-primary-500: #ff8c2f;
+    --ui-color-primary-600: #ea580c;
+    --ui-color-primary-700: #c2410c;
+    --ui-color-primary-800: #9a3412;
+    --ui-color-primary-900: #7c2d12;
+    --ui-color-primary-950: #431407;
+
+    /* Nuxt UI Color System - Neutral (True Gray) */
+    --ui-color-neutral-50: #fafafa;
+    --ui-color-neutral-100: #f5f5f5;
+    --ui-color-neutral-200: #e5e5e5;
+    --ui-color-neutral-300: #d4d4d4;
+    --ui-color-neutral-400: #a3a3a3;
+    --ui-color-neutral-500: #737373;
+    --ui-color-neutral-600: #525252;
+    --ui-color-neutral-700: #404040;
+    --ui-color-neutral-800: #262626;
+    --ui-color-neutral-900: #171717;
+    --ui-color-neutral-950: #0a0a0a;
+
+    /* Nuxt UI Default color shades */
+    --ui-primary: var(--ui-color-primary-500);
+    --ui-secondary: var(--ui-color-neutral-500);
+
+    /* Nuxt UI Design Tokens - Text */
+    --ui-text-dimmed: var(--ui-color-neutral-400);
+    --ui-text-muted: var(--ui-color-neutral-500);
+    --ui-text-toned: var(--ui-color-neutral-600);
+    --ui-text: var(--ui-color-neutral-700);
+    --ui-text-highlighted: var(--ui-color-neutral-900);
+    --ui-text-inverted: white;
+
+    /* Nuxt UI Design Tokens - Background */
+    --ui-bg: white;
+    --ui-bg-muted: var(--ui-color-neutral-50);
+    --ui-bg-elevated: var(--ui-color-neutral-100);
+    --ui-bg-accented: var(--ui-color-neutral-200);
+    --ui-bg-inverted: var(--ui-color-neutral-900);
+
+    /* Nuxt UI Design Tokens - Border */
+    --ui-border: var(--ui-color-neutral-200);
+    --ui-border-muted: var(--ui-color-neutral-200);
+    --ui-border-accented: var(--ui-color-neutral-300);
+    --ui-border-inverted: var(--ui-color-neutral-900);
+
+    /* Nuxt UI Radius */
+    --ui-radius: 0.5rem;
+
     --background: 0 0% 100%;
     --foreground: 0 0% 3.9%;
     --muted: 0 0% 96.1%;
@@ -12,7 +66,7 @@
     --card-foreground: 0 0% 3.9%;
     --border: 0 0% 89.8%;
     --input: 0 0% 89.8%;
-    --primary: 0 0% 9%;
+    --primary: 24 100% 50%; /* Orange #ff8c2f in HSL */
     --primary-foreground: 0 0% 98%;
     --secondary: 0 0% 96.1%;
     --secondary-foreground: 0 0% 9%;
@@ -20,7 +74,7 @@
     --accent-foreground: 0 0% 9%;
     --destructive: 0 84.2% 60.2%;
     --destructive-foreground: 0 0% 98%;
-    --ring: 0 0% 3.9%;
+    --ring: 24 100% 50%; /* Orange ring to match primary */
     --chart-1: 12 76% 61%;
     --chart-2: 173 58% 39%;
     --chart-3: 197 37% 24%;
@@ -30,6 +84,31 @@

   /* Dark mode */
   .dark {
+    /* Nuxt UI Default color shades - Dark mode */
+    --ui-primary: var(--ui-color-primary-400);
+    --ui-secondary: var(--ui-color-neutral-400);
+
+    /* Nuxt UI Design Tokens - Text (Dark) */
+    --ui-text-dimmed: var(--ui-color-neutral-500);
+    --ui-text-muted: var(--ui-color-neutral-400);
+    --ui-text-toned: var(--ui-color-neutral-300);
+    --ui-text: var(--ui-color-neutral-200);
+    --ui-text-highlighted: white;
+    --ui-text-inverted: var(--ui-color-neutral-900);
+
+    /* Nuxt UI Design Tokens - Background (Dark) */
+    --ui-bg: var(--ui-color-neutral-900);
+    --ui-bg-muted: var(--ui-color-neutral-800);
+    --ui-bg-elevated: var(--ui-color-neutral-800);
+    --ui-bg-accented: var(--ui-color-neutral-700);
+    --ui-bg-inverted: white;
+
+    /* Nuxt UI Design Tokens - Border (Dark) */
+    --ui-border: var(--ui-color-neutral-800);
+    --ui-border-muted: var(--ui-color-neutral-700);
+    --ui-border-accented: var(--ui-color-neutral-700);
+    --ui-border-inverted: white;
+
     --background: 0 0% 3.9%;
     --foreground: 0 0% 98%;
     --muted: 0 0% 14.9%;
@@ -40,15 +119,15 @@
     --card-foreground: 0 0% 98%;
     --border: 0 0% 14.9%;
     --input: 0 0% 14.9%;
-    --primary: 0 0% 98%;
-    --primary-foreground: 0 0% 9%;
+    --primary: 24 100% 50%; /* Orange #ff8c2f in HSL */
+    --primary-foreground: 0 0% 98%;
     --secondary: 0 0% 14.9%;
     --secondary-foreground: 0 0% 98%;
     --accent: 0 0% 14.9%;
     --accent-foreground: 0 0% 98%;
     --destructive: 0 62.8% 30.6%;
     --destructive-foreground: 0 0% 98%;
-    --ring: 0 0% 83.1%;
+    --ring: 24 100% 50%; /* Orange ring to match primary */
     --chart-1: 220 70% 50%;
     --chart-2: 160 60% 45%;
     --chart-3: 30 80% 55%;
@@ -62,69 +141,4 @@
     --background: 0 0% 3.9%;
     --foreground: 0 0% 98%;
     --border: 0 0% 14.9%;
   }
-
-  /* For web components: inherit CSS variables from the host */
-  :host {
-    --background: inherit;
-    --foreground: inherit;
-    --muted: inherit;
-    --muted-foreground: inherit;
-    --popover: inherit;
-    --popover-foreground: inherit;
-    --card: inherit;
-    --card-foreground: inherit;
-    --border: inherit;
-    --input: inherit;
-    --primary: inherit;
-    --primary-foreground: inherit;
-    --secondary: inherit;
-    --secondary-foreground: inherit;
-    --accent: inherit;
-    --accent-foreground: inherit;
-    --destructive: inherit;
-    --destructive-foreground: inherit;
-    --ring: inherit;
-    --chart-1: inherit;
-    --chart-2: inherit;
-    --chart-3: inherit;
-    --chart-4: inherit;
-    --chart-5: inherit;
-  }
-
-  /* Class-based dark mode support for web components using :host-context */
-  :host-context(.dark) {
-    --background: 0 0% 3.9%;
-    --foreground: 0 0% 98%;
-    --muted: 0 0% 14.9%;
-    --muted-foreground: 0 0% 63.9%;
-    --popover: 0 0% 3.9%;
-    --popover-foreground: 0 0% 98%;
-    --card: 0 0% 3.9%;
-    --card-foreground: 0 0% 98%;
-    --border: 0 0% 14.9%;
-    --input: 0 0% 14.9%;
-    --primary: 0 0% 98%;
-    --primary-foreground: 0 0% 9%;
-    --secondary: 0 0% 14.9%;
-    --secondary-foreground: 0 0% 98%;
-    --accent: 0 0% 14.9%;
-    --accent-foreground: 0 0% 98%;
-    --destructive: 0 62.8% 30.6%;
-    --destructive-foreground: 0 0% 98%;
-    --ring: 0 0% 83.1%;
-    --chart-1: 220 70% 50%;
-    --chart-2: 160 60% 45%;
-    --chart-3: 30 80% 55%;
-    --chart-4: 280 65% 60%;
-    --chart-5: 340 75% 55%;
-  }
-
-  /* Alternative class-based dark mode support for specific Unraid themes */
-  :host-context(.dark[data-theme='black']),
-  :host-context(.dark[data-theme='gray']) {
-    --background: 0 0% 3.9%;
-    --foreground: 0 0% 98%;
-    --border: 0 0% 14.9%;
-  }
 }
```
```diff
@@ -1,5 +1,5 @@
 /* Tailwind Shared Styles - Single entry point for all shared CSS */
 @import './css-variables.css';
 @import './unraid-theme.css';
+@import './theme-variants.css';
 @import './base-utilities.css';
-@import './sonner.css';
```
sonner.css (file deleted, @@ -1,665 +0,0 @@). The removed file (truncated at the end of the capture):

```css
/**------------------------------------------------------------------------------------------------
 * SONNER.CSS
 * This is a copy of Sonner's `style.css` as of commit a5b77c2df08d5c05aa923170176168102855533d
 *
 * This was necessary because I couldn't find a simple way to include Sonner's styles in vite's
 * css build output. They wouldn't show up even though the toaster was included, and vue-sonner
 * currently doesn't export its stylesheet (it appears to be inlined, but styles weren't applied
 * to the unraid-toaster component for some reason).
 *------------------------------------------------------------------------------------------------**/
:where(html[dir='ltr']),
:where([data-sonner-toaster][dir='ltr']) {
  --toast-icon-margin-start: -3px;
  --toast-icon-margin-end: 4px;
  --toast-svg-margin-start: -1px;
  --toast-svg-margin-end: 0px;
  --toast-button-margin-start: auto;
  --toast-button-margin-end: 0;
  --toast-close-button-start: 0;
  --toast-close-button-end: unset;
  --toast-close-button-transform: translate(-35%, -35%);
}

:where(html[dir='rtl']),
:where([data-sonner-toaster][dir='rtl']) {
  --toast-icon-margin-start: 4px;
  --toast-icon-margin-end: -3px;
  --toast-svg-margin-start: 0px;
  --toast-svg-margin-end: -1px;
  --toast-button-margin-start: 0;
  --toast-button-margin-end: auto;
  --toast-close-button-start: unset;
  --toast-close-button-end: 0;
  --toast-close-button-transform: translate(35%, -35%);
}

:where([data-sonner-toaster]) {
  position: fixed;
  width: var(--width);
  font-family: ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, Helvetica Neue, Arial,
    Noto Sans, sans-serif, Apple Color Emoji, Segoe UI Emoji, Segoe UI Symbol, Noto Color Emoji;
  --gray1: hsl(0, 0%, 99%);
  --gray2: hsl(0, 0%, 97.3%);
  --gray3: hsl(0, 0%, 95.1%);
  --gray4: hsl(0, 0%, 93%);
  --gray5: hsl(0, 0%, 90.9%);
  --gray6: hsl(0, 0%, 88.7%);
  --gray7: hsl(0, 0%, 85.8%);
  --gray8: hsl(0, 0%, 78%);
  --gray9: hsl(0, 0%, 56.1%);
  --gray10: hsl(0, 0%, 52.3%);
  --gray11: hsl(0, 0%, 43.5%);
  --gray12: hsl(0, 0%, 9%);
  --border-radius: 8px;
  box-sizing: border-box;
  padding: 0;
  margin: 0;
  list-style: none;
  outline: none;
  z-index: 999999999;
  transition: transform 400ms ease;
}

:where([data-sonner-toaster][data-lifted='true']) {
  transform: translateY(-10px);
}

@media (hover: none) and (pointer: coarse) {
  :where([data-sonner-toaster][data-lifted='true']) {
    transform: none;
  }
}

:where([data-sonner-toaster][data-x-position='right']) {
  right: max(var(--offset), env(safe-area-inset-right));
}

:where([data-sonner-toaster][data-x-position='left']) {
  left: max(var(--offset), env(safe-area-inset-left));
}

:where([data-sonner-toaster][data-x-position='center']) {
  left: 50%;
  transform: translateX(-50%);
}

:where([data-sonner-toaster][data-y-position='top']) {
  top: max(var(--offset), env(safe-area-inset-top));
}

:where([data-sonner-toaster][data-y-position='bottom']) {
  bottom: max(var(--offset), env(safe-area-inset-bottom));
}

:where([data-sonner-toast]) {
  --y: translateY(100%);
  --lift-amount: calc(var(--lift) * var(--gap));
  z-index: var(--z-index);
  position: absolute;
  opacity: 0;
  transform: var(--y);
  filter: blur(0);
  /* https://stackoverflow.com/questions/48124372/pointermove-event-not-working-with-touch-why-not */
  touch-action: none;
  transition: transform 400ms, opacity 400ms, height 400ms, box-shadow 200ms;
  box-sizing: border-box;
  outline: none;
  overflow-wrap: anywhere;
}

:where([data-sonner-toast][data-styled='true']) {
  padding: 16px;
  background: var(--normal-bg);
  border: 1px solid var(--normal-border);
  color: var(--normal-text);
  border-radius: var(--border-radius);
  box-shadow: 0px 4px 12px rgba(0, 0, 0, 0.1);
  width: var(--width);
  font-size: 13px;
  display: flex;
  align-items: center;
  gap: 6px;
}

:where([data-sonner-toast]:focus-visible) {
  box-shadow: 0px 4px 12px rgba(0, 0, 0, 0.1), 0 0 0 2px rgba(0, 0, 0, 0.2);
}

:where([data-sonner-toast][data-y-position='top']) {
  top: 0;
  --y: translateY(-100%);
  --lift: 1;
  --lift-amount: calc(1 * var(--gap));
}

:where([data-sonner-toast][data-y-position='bottom']) {
  bottom: 0;
  --y: translateY(100%);
  --lift: -1;
  --lift-amount: calc(var(--lift) * var(--gap));
}

:where([data-sonner-toast]) :where([data-description]) {
  font-weight: 400;
  line-height: 1.4;
  color: inherit;
}

:where([data-sonner-toast]) :where([data-title]) {
  font-weight: 500;
  line-height: 1.5;
  color: inherit;
}

:where([data-sonner-toast]) :where([data-icon]) {
  display: flex;
  height: 16px;
  width: 16px;
  position: relative;
  justify-content: flex-start;
  align-items: center;
  flex-shrink: 0;
  margin-left: var(--toast-icon-margin-start);
  margin-right: var(--toast-icon-margin-end);
}

:where([data-sonner-toast][data-promise='true']) :where([data-icon]) > svg {
  opacity: 0;
  transform: scale(0.8);
  transform-origin: center;
  animation: sonner-fade-in 300ms ease forwards;
}

:where([data-sonner-toast]) :where([data-icon]) > * {
  flex-shrink: 0;
}

:where([data-sonner-toast]) :where([data-icon]) svg {
  margin-left: var(--toast-svg-margin-start);
  margin-right: var(--toast-svg-margin-end);
}

:where([data-sonner-toast]) :where([data-content]) {
  display: flex;
  flex-direction: column;
  gap: 2px;
}

[data-sonner-toast][data-styled='true'] [data-button] {
  border-radius: 4px;
  padding-left: 8px;
  padding-right: 8px;
  height: 24px;
  font-size: 12px;
  color: var(--normal-bg);
  background: var(--normal-text);
  margin-left: var(--toast-button-margin-start);
  margin-right: var(--toast-button-margin-end);
  border: none;
  cursor: pointer;
  outline: none;
  display: flex;
  align-items: center;
  flex-shrink: 0;
  transition: opacity 400ms, box-shadow 200ms;
}

:where([data-sonner-toast]) :where([data-button]):focus-visible {
  box-shadow: 0 0 0 2px rgba(0, 0, 0, 0.4);
}

:where([data-sonner-toast]) :where([data-button]):first-of-type {
  margin-left: var(--toast-button-margin-start);
  margin-right: var(--toast-button-margin-end);
}

:where([data-sonner-toast]) :where([data-cancel]) {
  color: var(--normal-text);
  background: rgba(0, 0, 0, 0.08);
}

:where([data-sonner-toast][data-theme='dark']) :where([data-cancel]) {
  background: rgba(255, 255, 255, 0.3);
}

[data-sonner-toast] [data-close-button] {
  position: absolute;
  left: var(--toast-close-button-start);
  right: var(--toast-close-button-end);
  top: 0;
  height: 20px;
  width: 20px;
  display: flex;
  justify-content: center;
  align-items: center;
  padding: 0;
  color: var(--gray12);
  border: 1px solid var(--gray4);
  transform: var(--toast-close-button-transform);
  border-radius: 50%;
  cursor: pointer;
  z-index: 1;
  transition: opacity 100ms, background 200ms, border-color 200ms;
}

[data-sonner-toast] [data-close-button] {
  background: var(--gray1);
}

:where([data-sonner-toast]) :where([data-close-button]):focus-visible {
  box-shadow: 0px 4px 12px rgba(0, 0, 0, 0.1), 0 0 0 2px rgba(0, 0, 0, 0.2);
}

:where([data-sonner-toast]) :where([data-disabled='true']) {
  cursor: not-allowed;
}

[data-sonner-toast]:hover [data-close-button]:hover {
  background: var(--gray2);
  border-color: var(--gray5);
}

/* Leave a ghost div to avoid setting hover to false when swiping out */
:where([data-sonner-toast][data-swiping='true'])::before {
  content: '';
  position: absolute;
  left: 0;
  right: 0;
  height: 100%;
  z-index: -1;
}

:where([data-sonner-toast][data-y-position='top'][data-swiping='true'])::before {
  /* y 50% needed to distribute height additional height evenly */
  bottom: 50%;
  transform: scaleY(3) translateY(50%);
}

:where([data-sonner-toast][data-y-position='bottom'][data-swiping='true'])::before {
  /* y -50% needed to distribute height additional height evenly */
  top: 50%;
  transform: scaleY(3) translateY(-50%);
}

/* Leave a ghost div to avoid setting hover to false when transitioning out */
:where([data-sonner-toast][data-swiping='false'][data-removed='true'])::before {
  content: '';
  position: absolute;
  inset: 0;
  transform: scaleY(2);
}

/* Needed to avoid setting hover to false when inbetween toasts */
:where([data-sonner-toast])::after {
  content: '';
  position: absolute;
  left: 0;
  height: calc(var(--gap) + 1px);
  bottom: 100%;
  width: 100%;
}

:where([data-sonner-toast][data-mounted='true']) {
  --y: translateY(0);
  opacity: 1;
}

:where([data-sonner-toast][data-expanded='false'][data-front='false']) {
  --scale: var(--toasts-before) * 0.05 + 1;
  --y: translateY(calc(var(--lift-amount) * var(--toasts-before))) scale(calc(-1 * var(--scale)));
  height: var(--front-toast-height);
}

:where([data-sonner-toast]) > * {
  transition: opacity 400ms;
}

:where([data-sonner-toast][data-expanded='false'][data-front='false'][data-styled='true']) > * {
  opacity: 0;
}

:where([data-sonner-toast][data-visible='false']) {
  opacity: 0;
  pointer-events: none;
}

:where([data-sonner-toast][data-mounted='true'][data-expanded='true']) {
  --y: translateY(calc(var(--lift) * var(--offset)));
  height: var(--initial-height);
}

:where([data-sonner-toast][data-removed='true'][data-front='true'][data-swipe-out='false']) {
  --y: translateY(calc(var(--lift) * -100%));
  opacity: 0;
}

:where([data-sonner-toast][data-removed='true'][data-front='false'][data-swipe-out='false'][data-expanded='true']) {
  --y: translateY(calc(var(--lift) * var(--offset) + var(--lift) * -100%));
  opacity: 0;
}

:where([data-sonner-toast][data-removed='true'][data-front='false'][data-swipe-out='false'][data-expanded='false']) {
  --y: translateY(40%);
  opacity: 0;
  transition: transform 500ms, opacity 200ms;
}

/* Bump up the height to make sure hover state doesn't get set to false */
:where([data-sonner-toast][data-removed='true'][data-front='false'])::before {
  height: calc(var(--initial-height) + 20%);
}

[data-sonner-toast][data-swiping='true'] {
  transform: var(--y) translateY(var(--swipe-amount, 0px));
  transition: none;
}

[data-sonner-toast][data-swiped='true'] {
  user-select: none;
}

[data-sonner-toast][data-swipe-out='true'][data-y-position='bottom'],
[data-sonner-toast][data-swipe-out='true'][data-y-position='top'] {
  animation: swipe-out 200ms ease-out forwards;
}

@keyframes swipe-out {
  from {
    transform: translateY(calc(var(--lift) * var(--offset) + var(--swipe-amount)));
    opacity: 1;
  }

  to {
    transform: translateY(calc(var(--lift) * var(--offset) + var(--swipe-amount) + var(--lift) * -100%));
    opacity: 0;
  }
}

@media (max-width: 600px) {
  [data-sonner-toaster] {
    position: fixed;
    --mobile-offset: 16px;
    right: var(--mobile-offset);
    left: var(--mobile-offset);
    width: 100%;
  }

  [data-sonner-toaster][dir='rtl'] {
    left: calc(var(--mobile-offset) * -1);
  }

  [data-sonner-toaster] [data-sonner-toast] {
    left: 0;
    right: 0;
    width: calc(100% - var(--mobile-offset) * 2);
  }

  [data-sonner-toaster][data-x-position='left'] {
    left: var(--mobile-offset);
  }

  [data-sonner-toaster][data-y-position='bottom'] {
    bottom: 20px;
  }

  [data-sonner-toaster][data-y-position='top'] {
    top: 20px;
  }

  [data-sonner-toaster][data-x-position='center'] {
    left: var(--mobile-offset);
    right: var(--mobile-offset);
    transform: none;
  }
}

[data-sonner-toaster][data-theme='light'] {
  --normal-bg: #fff;
  --normal-border: var(--gray4);
  --normal-text: var(--gray12);

  --success-bg: hsl(143, 85%, 96%);
  --success-border: hsl(145, 92%, 91%);
  --success-text: hsl(140, 100%, 27%);

  --info-bg: hsl(208, 100%, 97%);
  --info-border: hsl(221, 91%, 91%);
  --info-text: hsl(210, 92%, 45%);

  --warning-bg: hsl(49, 100%, 97%);
  --warning-border: hsl(49, 91%, 91%);
  --warning-text: hsl(31, 92%, 45%);

  --error-bg: hsl(359, 100%, 97%);
  --error-border: hsl(359, 100%, 94%);
  --error-text: hsl(360, 100%, 45%);
}

[data-sonner-toaster][data-theme='light'] [data-sonner-toast][data-invert='true'] {
  --normal-bg: #000;
  --normal-border: hsl(0, 0%, 20%);
  --normal-text: var(--gray1);
}

[data-sonner-toaster][data-theme='dark'] [data-sonner-toast][data-invert='true'] {
  --normal-bg: #fff;
  --normal-border: var(--gray3);
  --normal-text: var(--gray12);
}

[data-sonner-toaster][data-theme='dark'] {
  --normal-bg: #000;
  --normal-border: hsl(0, 0%, 20%);
  --normal-text: var(--gray1);

  --success-bg: hsl(150, 100%, 6%);
  --success-border: hsl(147, 100%, 12%);
  --success-text: hsl(150, 86%, 65%);

  --info-bg: hsl(215, 100%, 6%);
  --info-border: hsl(223, 100%, 12%);
  --info-text: hsl(216, 87%, 65%);

  --warning-bg: hsl(64, 100%, 6%);
  --warning-border: hsl(60, 100%, 12%);
  --warning-text: hsl(46, 87%, 65%);

  --error-bg: hsl(358, 76%, 10%);
  --error-border: hsl(357, 89%, 16%);
  --error-text: hsl(358, 100%, 81%);
}

[data-rich-colors='true'][data-sonner-toast][data-type='success'] {
  background: var(--success-bg);
  border-color: var(--success-border);
  color: var(--success-text);
}

[data-rich-colors='true'][data-sonner-toast][data-type='success'] [data-close-button] {
  background: var(--success-bg);
  border-color: var(--success-border);
  color: var(--success-text);
}

[data-rich-colors='true'][data-sonner-toast][data-type='info'] {
  background: var(--info-bg);
  border-color: var(--info-border);
  color: var(--info-text);
}

[data-rich-colors='true'][data-sonner-toast][data-type='info'] [data-close-button] {
  background: var(--info-bg);
  border-color: var(--info-border);
  color: var(--info-text);
}

[data-rich-colors='true'][data-sonner-toast][data-type='warning'] {
  background: var(--warning-bg);
  border-color: var(--warning-border);
  color: var(--warning-text);
}

[data-rich-colors='true'][data-sonner-toast][data-type='warning'] [data-close-button] {
  background: var(--warning-bg);
  border-color: var(--warning-border);
  color: var(--warning-text);
}

[data-rich-colors='true'][data-sonner-toast][data-type='error'] {
  background: var(--error-bg);
  border-color: var(--error-border);
  color: var(--error-text);
}

[data-rich-colors='true'][data-sonner-toast][data-type='error'] [data-close-button] {
  background: var(--error-bg);
  border-color: var(--error-border);
  color: var(--error-text);
}

.sonner-loading-wrapper {
  --size: 16px;
  height: var(--size);
  width: var(--size);
  position: absolute;
  inset: 0;
  z-index: 10;
}

.sonner-loading-wrapper[data-visible='false'] {
  transform-origin: center;
  animation: sonner-fade-out 0.2s ease forwards;
}

.sonner-spinner {
  position: relative;
  top: 50%;
  left: 50%;
  height: var(--size);
  width: var(--size);
}

.sonner-loading-bar {
  animation: sonner-spin 1.2s linear infinite;
  background: var(--gray11);
  border-radius: 6px;
  height: 8%;
  left: -10%;
  position: absolute;
  top: -3.9%;
  width: 24%;
}

.sonner-loading-bar:nth-child(1) {
  animation-delay: -1.2s;
  transform: rotate(0.0001deg) translate(146%);
}

.sonner-loading-bar:nth-child(2) {
  animation-delay: -1.1s;
  transform: rotate(30deg) translate(146%);
}

.sonner-loading-bar:nth-child(3) {
  animation-delay: -1s;
  transform: rotate(60deg) translate(146%);
}

.sonner-loading-bar:nth-child(4) {
  animation-delay: -0.9s;
  transform: rotate(90deg) translate(146%);
}

.sonner-loading-bar:nth-child(5) {
  animation-delay: -0.8s;
  transform: rotate(120deg) translate(146%);
}

.sonner-loading-bar:nth-child(6) {
  animation-delay: -0.7s;
  transform: rotate(150deg) translate(146%);
}

.sonner-loading-bar:nth-child(7) {
  animation-delay: -0.6s;
  transform: rotate(180deg) translate(146%);
}

.sonner-loading-bar:nth-child(8) {
  animation-delay: -0.5s;
  transform: rotate(210deg) translate(146%);
}

.sonner-loading-bar:nth-child(9) {
  animation-delay: -0.4s;
  transform: rotate(240deg) translate(146%);
}

.sonner-loading-bar:nth-child(10) {
  animation-delay: -0.3s;
  transform: rotate(270deg) translate(146%);
}

.sonner-loading-bar:nth-child(11) {
  animation-delay: -0.2s;
  transform: rotate(300deg) translate(146%);
}

.sonner-loading-bar:nth-child(12) {
  animation-delay: -0.1s;
  transform: rotate(330deg) translate(146%);
}

@keyframes sonner-fade-in {
  0% {
    opacity: 0;
    transform: scale(0.8);
  }
  100% {
    opacity: 1;
    transform: scale(1);
  }
}

@keyframes sonner-fade-out {
```
|
||||
0% {
|
||||
opacity: 1;
|
||||
transform: scale(1);
|
||||
}
|
||||
100% {
|
||||
opacity: 0;
|
||||
transform: scale(0.8);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes sonner-spin {
|
||||
0% {
|
||||
opacity: 1;
|
||||
}
|
||||
100% {
|
||||
opacity: 0.15;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-reduced-motion) {
|
||||
[data-sonner-toast],
|
||||
[data-sonner-toast] > *,
|
||||
.sonner-loading-bar {
|
||||
transition: none !important;
|
||||
animation: none !important;
|
||||
}
|
||||
}
|
||||
|
||||
.sonner-loader {
|
||||
position: absolute;
|
||||
top: 50%;
|
||||
left: 50%;
|
||||
transform: translate(-50%, -50%);
|
||||
transform-origin: center;
|
||||
transition: opacity 200ms, transform 200ms;
|
||||
}
|
||||
|
||||
.sonner-loader[data-visible='false'] {
|
||||
opacity: 0;
|
||||
transform: scale(0.8) translate(-50%, -50%);
|
||||
}
|
||||
97 @tailwind-shared/theme-variants.css Normal file
@@ -0,0 +1,97 @@
/**
 * Tailwind v4 Theme Variants
 * Defines theme-specific CSS variables that can be switched via classes
 * These are applied dynamically based on the theme selected in GraphQL
 */

/* Default/White Theme */
:root,
.theme-white {
  --header-text-primary: #ffffff;
  --header-text-secondary: #999999;
  --header-background-color: #1c1b1b;
  --header-gradient-start: rgba(28, 27, 27, 0);
  --header-gradient-end: rgba(28, 27, 27, 0.7);
  --ui-border-muted: hsl(240 5% 20%);
  --color-border: #383735;
  --color-alpha: #ff8c2f;
  --color-beta: #1c1b1b;
  --color-gamma: #ffffff;
  --color-gamma-opaque: rgba(255, 255, 255, 0.3);
}

/* Black Theme */
.theme-black,
.theme-black.dark {
  --header-text-primary: #1c1b1b;
  --header-text-secondary: #999999;
  --header-background-color: #f2f2f2;
  --header-gradient-start: rgba(242, 242, 242, 0);
  --header-gradient-end: rgba(242, 242, 242, 0.7);
  --ui-border-muted: hsl(240 5.9% 90%);
  --color-border: #e0e0e0;
  --color-alpha: #ff8c2f;
  --color-beta: #f2f2f2;
  --color-gamma: #1c1b1b;
  --color-gamma-opaque: rgba(28, 27, 27, 0.3);
}

/* Gray Theme */
.theme-gray {
  --header-text-primary: #ffffff;
  --header-text-secondary: #999999;
  --header-background-color: #1c1b1b;
  --header-gradient-start: rgba(28, 27, 27, 0);
  --header-gradient-end: rgba(28, 27, 27, 0.7);
  --ui-border-muted: hsl(240 5% 25%);
  --color-border: #383735;
  --color-alpha: #ff8c2f;
  --color-beta: #383735;
  --color-gamma: #ffffff;
  --color-gamma-opaque: rgba(255, 255, 255, 0.3);
}

/* Azure Theme */
.theme-azure {
  --header-text-primary: #1c1b1b;
  --header-text-secondary: #999999;
  --header-background-color: #f2f2f2;
  --header-gradient-start: rgba(242, 242, 242, 0);
  --header-gradient-end: rgba(242, 242, 242, 0.7);
  --ui-border-muted: hsl(210 40% 80%);
  --color-border: #5a8bb8;
  --color-alpha: #ff8c2f;
  --color-beta: #e7f2f8;
  --color-gamma: #336699;
  --color-gamma-opaque: rgba(51, 102, 153, 0.3);
}

/* Dark Mode Overrides */
.dark {
  --ui-border-muted: hsl(240 5% 20%);
  --color-border: #383735;
}

/*
 * Dynamic color variables for user overrides from GraphQL
 * These are set via JavaScript and override the theme defaults
 * Using :root with class for higher specificity to override theme classes
 */
:root.has-custom-header-text {
  --header-text-primary: var(--custom-header-text-primary);
  --color-header-text-primary: var(--custom-header-text-primary);
}

:root.has-custom-header-meta {
  --header-text-secondary: var(--custom-header-text-secondary);
  --color-header-text-secondary: var(--custom-header-text-secondary);
}

:root.has-custom-header-bg {
  --header-background-color: var(--custom-header-background-color);
  --color-header-background: var(--custom-header-background-color);
  --header-gradient-start: var(--custom-header-gradient-start);
  --header-gradient-end: var(--custom-header-gradient-end);
  --color-header-gradient-start: var(--custom-header-gradient-start);
  --color-header-gradient-end: var(--custom-header-gradient-end);
}
@@ -84,23 +84,23 @@
  --color-primary-900: #7c2d12;
  --color-primary-950: #431407;

  /* Header colors */
  --color-header-text-primary: var(--header-text-primary);
  --color-header-text-secondary: var(--header-text-secondary);
  --color-header-background-color: var(--header-background-color);
  /* Header colors - defaults will be overridden by theme */
  --color-header-text-primary: var(--header-text-primary, #1c1c1c);
  --color-header-text-secondary: var(--header-text-secondary, #999999);
  --color-header-background: var(--header-background-color, #f2f2f2);

  /* Legacy colors */
  --color-alpha: var(--color-alpha);
  --color-beta: var(--color-beta);
  --color-gamma: var(--color-gamma);
  --color-gamma-opaque: var(--color-gamma-opaque);
  --color-customgradient-start: var(--color-customgradient-start);
  --color-customgradient-end: var(--color-customgradient-end);
  /* Legacy colors - defaults (overridden by theme-variants.css) */
  --color-alpha: #ff8c2f;
  --color-beta: #f2f2f2;
  --color-gamma: #999999;
  --color-gamma-opaque: rgba(153, 153, 153, 0.5);
  --color-customgradient-start: rgba(242, 242, 242, 0);
  --color-customgradient-end: rgba(242, 242, 242, 0.85);

  /* Gradients */
  --color-header-gradient-start: var(--header-gradient-start);
  --color-header-gradient-end: var(--header-gradient-end);
  --color-banner-gradient: var(--banner-gradient);
  /* Gradients - defaults (overridden by theme-variants.css) */
  --color-header-gradient-start: rgba(242, 242, 242, 0);
  --color-header-gradient-end: rgba(242, 242, 242, 0.85);
  --color-banner-gradient: none;

  /* Font sizes */
  --font-10px: 10px;
@@ -167,6 +167,27 @@
  --max-width-800px: 800px;
  --max-width-1024px: 1024px;

  /* Container sizes adjusted for 10px base font size (1.6x scale) */
  --container-xs: 32rem;
  --container-sm: 38.4rem;
  --container-md: 44.8rem;
  --container-lg: 51.2rem;
  --container-xl: 57.6rem;
  --container-2xl: 67.2rem;
  --container-3xl: 76.8rem;
  --container-4xl: 89.6rem;
  --container-5xl: 102.4rem;
  --container-6xl: 115.2rem;
  --container-7xl: 128rem;

  /* Extended width scale for max-w-* utilities */
  --width-5xl: 102.4rem;
  --width-6xl: 115.2rem;
  --width-7xl: 128rem;
  --width-8xl: 140.8rem;
  --width-9xl: 153.6rem;
  --width-10xl: 166.4rem;

  /* Animations */
  --animate-mark-2: mark-2 1.5s ease infinite;
  --animate-mark-3: mark-3 1.5s ease infinite;
10 CLAUDE.md
@@ -7,7 +7,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co
This is the Unraid API monorepo containing multiple packages that provide API functionality for Unraid servers. It uses pnpm workspaces with the following structure:

- `/api` - Core NestJS API server with GraphQL
- `/web` - Nuxt.js frontend application
- `/web` - Vue 3 frontend application
- `/unraid-ui` - Vue 3 component library
- `/plugin` - Unraid plugin package (.plg)
- `/packages` - Shared packages and API plugins
@@ -128,9 +128,6 @@ Enables GraphQL playground at `http://tower.local/graphql`
- **Use Mocks Correctly**: Mocks should be used as nouns, not verbs.

#### Vue Component Testing

- This is a Nuxt.js app but we are testing with vitest outside of the Nuxt environment
- Nuxt is currently set to auto import so some vue files may need compute or ref imported
- Use pnpm when running terminal commands and stay within the web directory
- Tests are located under `web/__test__`, run with `pnpm test`
- Use `mount` from Vue Test Utils for component testing
@@ -157,4 +154,7 @@ Enables GraphQL playground at `http://tower.local/graphql`

- We are using tailwind v4 we do not need a tailwind config anymore
- always search the internet for tailwind v4 documentation when making tailwind related style changes
- never run or restart the API server or web server. I will handle the lifecylce, simply wait and ask me to do this for you
- never run or restart the API server or web server. I will handle the lifecycle, simply wait and ask me to do this for you
- Never use the `any` type. Always prefer proper typing
- Avoid using casting whenever possible, prefer proper typing from the start
- **IMPORTANT:** cache-manager v7 expects TTL values in **milliseconds**, not seconds. Always use milliseconds when setting cache TTL (e.g., 600000 for 10 minutes, not 600)
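The TTL rule in that last guideline is worth pinning down with code; a minimal sketch, assuming cache-manager v7's `createCache` with its default in-memory store:

```typescript
import { createCache } from 'cache-manager';

const cache = createCache();

// 10 minutes = 600_000 ms. Passing 600 here would expire the entry after
// 0.6 seconds, which is exactly the mistake the note above warns against.
await cache.set('docker:digests', { fetchedAt: Date.now() }, 600_000);

const digests = await cache.get('docker:digests');
```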
@@ -31,3 +31,4 @@ BYPASS_CORS_CHECKS=true
CHOKIDAR_USEPOLLING=true
LOG_TRANSPORT=console
LOG_LEVEL=trace
ENABLE_NEXT_DOCKER_RELEASE=true
3 api/.gitignore vendored
@@ -93,3 +93,6 @@ dev/local-session

# local OIDC config for testing - contains secrets
dev/configs/oidc.local.json

# local api keys
dev/keys/*
166 api/CHANGELOG.md
@@ -1,5 +1,171 @@
# Changelog

## [4.22.2](https://github.com/unraid/api/compare/v4.22.1...v4.22.2) (2025-09-15)

### Bug Fixes

* **deps:** pin dependency conventional-changelog-conventionalcommits to 9.1.0 ([#1697](https://github.com/unraid/api/issues/1697)) ([9a86c61](https://github.com/unraid/api/commit/9a86c615da2e975f568922fa012cc29b3f9cde0e))
* **deps:** update dependency filenamify to v7 ([#1703](https://github.com/unraid/api/issues/1703)) ([b80988a](https://github.com/unraid/api/commit/b80988aaabebc4b8dbf2bf31f0764bf2f28e1575))
* **deps:** update graphqlcodegenerator monorepo (major) ([#1689](https://github.com/unraid/api/issues/1689)) ([ba4a43a](https://github.com/unraid/api/commit/ba4a43aec863fc30c47dd17370d74daed7f84703))
* false positive on verify_install script being external shell ([#1704](https://github.com/unraid/api/issues/1704)) ([31a255c](https://github.com/unraid/api/commit/31a255c9281b29df983d0f5d0475cd5a69790a48))
* improve vue mount speed by 10x ([c855caa](https://github.com/unraid/api/commit/c855caa9b2d4d63bead1a992f5c583e00b9ba843))

## [4.22.1](https://github.com/unraid/api/compare/v4.22.0...v4.22.1) (2025-09-12)

### Bug Fixes

* set input color in SSO field rather than inside of the main.css ([01d353f](https://github.com/unraid/api/commit/01d353fa08a3df688b37a495a204605138f7f71d))

## [4.22.0](https://github.com/unraid/api/compare/v4.21.0...v4.22.0) (2025-09-12)

### Features

* improved update ui ([#1691](https://github.com/unraid/api/issues/1691)) ([a59b363](https://github.com/unraid/api/commit/a59b363ebc1e660f854c55d50fc02c823c2fd0cc))

### Bug Fixes

* **deps:** update dependency camelcase-keys to v10 ([#1687](https://github.com/unraid/api/issues/1687)) ([95faeaa](https://github.com/unraid/api/commit/95faeaa2f39bf7bd16502698d7530aaa590b286d))
* **deps:** update dependency p-retry to v7 ([#1608](https://github.com/unraid/api/issues/1608)) ([c782cf0](https://github.com/unraid/api/commit/c782cf0e8710c6690050376feefda3edb30dd549))
* **deps:** update dependency uuid to v13 ([#1688](https://github.com/unraid/api/issues/1688)) ([2fef10c](https://github.com/unraid/api/commit/2fef10c94aae910e95d9f5bcacf7289e2cca6ed9))
* **deps:** update dependency vue-sonner to v2 ([#1475](https://github.com/unraid/api/issues/1475)) ([f95ca9c](https://github.com/unraid/api/commit/f95ca9c9cb69725dcf3bb4bcbd0b558a2074e311))
* display settings fix for languages on less than 7.2-beta.2.3 ([#1696](https://github.com/unraid/api/issues/1696)) ([03dae7c](https://github.com/unraid/api/commit/03dae7ce66b3409593eeee90cd5b56e2a920ca44))
* hide reset help option when sso is being checked ([#1695](https://github.com/unraid/api/issues/1695)) ([222ced7](https://github.com/unraid/api/commit/222ced7518d40c207198a3b8548f0e024bc865b0))
* progressFrame white on black ([0990b89](https://github.com/unraid/api/commit/0990b898bd02c231153157c20d5142e5fd4513cd))

## [4.21.0](https://github.com/unraid/api/compare/v4.20.4...v4.21.0) (2025-09-10)

### Features

* add zsh shell detection to install script ([#1539](https://github.com/unraid/api/issues/1539)) ([50ea2a3](https://github.com/unraid/api/commit/50ea2a3ffb82b30152fb85e0fb9b0d178d596efe))
* **api:** determine if docker container has update ([#1582](https://github.com/unraid/api/issues/1582)) ([e57d81e](https://github.com/unraid/api/commit/e57d81e0735772758bb85e0b3c89dce15c56635e))

### Bug Fixes

* white on white login text ([ae4d3ec](https://github.com/unraid/api/commit/ae4d3ecbc417454ae3c6e02018f8e4c49bbfc902))

## [4.20.4](https://github.com/unraid/api/compare/v4.20.3...v4.20.4) (2025-09-09)

### Bug Fixes

* staging PR plugin fixes + UI issues on 7.2 beta ([b79b44e](https://github.com/unraid/api/commit/b79b44e95c65a124313814ab55b0d0a745a799c7))

## [4.20.3](https://github.com/unraid/api/compare/v4.20.2...v4.20.3) (2025-09-09)

### Bug Fixes

* header background color issues fixed on 7.2 - thanks Nick! ([73c1100](https://github.com/unraid/api/commit/73c1100d0ba396fe4342f8ce7561017ab821e68b))

## [4.20.2](https://github.com/unraid/api/compare/v4.20.1...v4.20.2) (2025-09-09)

### Bug Fixes

* trigger deployment ([a27453f](https://github.com/unraid/api/commit/a27453fda81e4eeb07f257e60516bebbbc27cf7a))

## [4.20.1](https://github.com/unraid/api/compare/v4.20.0...v4.20.1) (2025-09-09)

### Bug Fixes

* adjust header styles to fix flashing and width issues - thanks ZarZ ([4759b3d](https://github.com/unraid/api/commit/4759b3d0b3fb6bc71636f75f807cd6f4f62305d1))

## [4.20.0](https://github.com/unraid/api/compare/v4.19.1...v4.20.0) (2025-09-08)

### Features

* **disks:** add isSpinning field to Disk type ([#1527](https://github.com/unraid/api/issues/1527)) ([193be3d](https://github.com/unraid/api/commit/193be3df3672514be9904e3d4fbdff776470afc0))

### Bug Fixes

* better component loading to prevent per-page strange behavior ([095c222](https://github.com/unraid/api/commit/095c2221c94f144f8ad410a69362b15803765531))
* **deps:** pin dependencies ([#1669](https://github.com/unraid/api/issues/1669)) ([413db4b](https://github.com/unraid/api/commit/413db4bd30a06aa69d3ca86e793782854f822589))
* **plugin:** add fallback for unraid-api stop in deprecation cleanup ([#1668](https://github.com/unraid/api/issues/1668)) ([797bf50](https://github.com/unraid/api/commit/797bf50ec702ebc8244ff71a8ef1a80ea5cd2169))
* prepend 'v' to API version in workflow dispatch inputs ([f0cffbd](https://github.com/unraid/api/commit/f0cffbdc7ac36e7037ab60fe9dddbb2cab4a5e10))
* progress frame background color fix ([#1672](https://github.com/unraid/api/issues/1672)) ([785f1f5](https://github.com/unraid/api/commit/785f1f5eb1a1cc8b41f6eb502e4092d149cfbd80))
* properly override header values ([#1673](https://github.com/unraid/api/issues/1673)) ([aecf70f](https://github.com/unraid/api/commit/aecf70ffad60c83074347d3d6ec23f73acbd1aee))

## [4.19.1](https://github.com/unraid/api/compare/v4.19.0...v4.19.1) (2025-09-05)

### Bug Fixes

* custom path detection to fix setup issues ([#1664](https://github.com/unraid/api/issues/1664)) ([2ecdb99](https://github.com/unraid/api/commit/2ecdb99052f39d89af21bbe7ad3f80b83bb1eaa9))

## [4.19.0](https://github.com/unraid/api/compare/v4.18.2...v4.19.0) (2025-09-04)

### Features

* mount vue apps, not web components ([#1639](https://github.com/unraid/api/issues/1639)) ([88087d5](https://github.com/unraid/api/commit/88087d5201992298cdafa791d5d1b5bb23dcd72b))

### Bug Fixes

* api version json response ([#1653](https://github.com/unraid/api/issues/1653)) ([292bc0f](https://github.com/unraid/api/commit/292bc0fc810a0d0f0cce6813b0631ff25099cc05))
* enhance DOM validation and cleanup in vue-mount-app ([6cf7c88](https://github.com/unraid/api/commit/6cf7c88242f2f4fe9f83871560039767b5b90273))
* enhance getKeyFile function to handle missing key file gracefully ([#1659](https://github.com/unraid/api/issues/1659)) ([728b38a](https://github.com/unraid/api/commit/728b38ac11faeacd39ce9d0157024ad140e29b36))
* info alert docker icon ([#1661](https://github.com/unraid/api/issues/1661)) ([239cdd6](https://github.com/unraid/api/commit/239cdd6133690699348e61f68e485d2b54fdcbdb))
* oidc cache busting issues fixed ([#1656](https://github.com/unraid/api/issues/1656)) ([e204eb8](https://github.com/unraid/api/commit/e204eb80a00ab9242e3dca4ccfc3e1b55a7694b7))
* **plugin:** restore cleanup behavior for unsupported unraid versions ([#1658](https://github.com/unraid/api/issues/1658)) ([534a077](https://github.com/unraid/api/commit/534a07788b76de49e9ba14059a9aed0bf16e02ca))
* UnraidToaster component and update dialog close button ([#1657](https://github.com/unraid/api/issues/1657)) ([44774d0](https://github.com/unraid/api/commit/44774d0acdd25aa33cb60a5d0b4f80777f4068e5))
* vue mounting logic with tests ([#1651](https://github.com/unraid/api/issues/1651)) ([33774aa](https://github.com/unraid/api/commit/33774aa596124a031a7452b62ca4c43743a09951))

## [4.18.2](https://github.com/unraid/api/compare/v4.18.1...v4.18.2) (2025-09-03)

### Bug Fixes

* add missing CPU guest metrics to CPU responses ([#1644](https://github.com/unraid/api/issues/1644)) ([99dbad5](https://github.com/unraid/api/commit/99dbad57d55a256f5f3f850f9a47a6eaa6348065))
* **plugin:** raise minimum unraid os version to 6.12.15 ([#1649](https://github.com/unraid/api/issues/1649)) ([bc15bd3](https://github.com/unraid/api/commit/bc15bd3d7008acb416ac3c6fb1f4724c685ec7e7))
* update GitHub Actions token for workflow trigger ([4d8588b](https://github.com/unraid/api/commit/4d8588b17331afa45ba8caf84fcec8c0ea03591f))
* update OIDC URL validation and add tests ([#1646](https://github.com/unraid/api/issues/1646)) ([c7c3bb5](https://github.com/unraid/api/commit/c7c3bb57ea482633a7acff064b39fbc8d4e07213))
* use shared bg & border color for styled toasts ([#1647](https://github.com/unraid/api/issues/1647)) ([7c3aee8](https://github.com/unraid/api/commit/7c3aee8f3f9ba82ae8c8ed3840c20ab47f3cb00f))

## [4.18.1](https://github.com/unraid/api/compare/v4.18.0...v4.18.1) (2025-09-03)

### Bug Fixes

* OIDC and API Key management issues ([#1642](https://github.com/unraid/api/issues/1642)) ([0fe2c2c](https://github.com/unraid/api/commit/0fe2c2c1c85dcc547e4b1217a3b5636d7dd6d4b4))
* rm redundant emission to `$HOME/.pm2/logs` ([#1640](https://github.com/unraid/api/issues/1640)) ([a8e4119](https://github.com/unraid/api/commit/a8e4119270868a1dabccd405853a7340f8dcd8a5))

## [4.18.0](https://github.com/unraid/api/compare/v4.17.0...v4.18.0) (2025-09-02)

### Features

* **api:** enhance OIDC redirect URI handling in service and tests ([#1618](https://github.com/unraid/api/issues/1618)) ([4e945f5](https://github.com/unraid/api/commit/4e945f5f56ce059eb275a9576caf3194a5df8a90))

### Bug Fixes

* api key creation cli ([#1637](https://github.com/unraid/api/issues/1637)) ([c147a6b](https://github.com/unraid/api/commit/c147a6b5075969e77798210c4a5cfd1fa5b96ae3))
* **cli:** support `--log-level` for `start` and `restart` cmds ([#1623](https://github.com/unraid/api/issues/1623)) ([a1ee915](https://github.com/unraid/api/commit/a1ee915ca52e5a063eccf8facbada911a63f37f6))
* confusing server -> status query ([#1635](https://github.com/unraid/api/issues/1635)) ([9d42b36](https://github.com/unraid/api/commit/9d42b36f74274cad72490da5152fdb98fdc5b89b))
* use unraid css variables in sonner ([#1634](https://github.com/unraid/api/issues/1634)) ([26a95af](https://github.com/unraid/api/commit/26a95af9539d05a837112d62dc6b7dd46761c83f))

## [4.17.0](https://github.com/unraid/api/compare/v4.16.0...v4.17.0) (2025-08-27)

### Features

* add tailwind class sort plugin ([#1562](https://github.com/unraid/api/issues/1562)) ([ab11e7f](https://github.com/unraid/api/commit/ab11e7ff7ff74da1f1cd5e49938459d00bfc846b))

### Bug Fixes

* cleanup obsoleted legacy api keys on api startup (cli / connect) ([#1630](https://github.com/unraid/api/issues/1630)) ([6469d00](https://github.com/unraid/api/commit/6469d002b7b18e49c77ee650a4255974ab43e790))

## [4.16.0](https://github.com/unraid/api/compare/v4.15.1...v4.16.0) (2025-08-27)
@@ -1,5 +1,5 @@
{
  "version": "4.15.1",
  "version": "4.22.1",
  "extraOrigins": [],
  "sandbox": true,
  "ssoSubIds": [],
@@ -17,5 +17,6 @@
      ],
      "buttonText": "Login With Unraid.net"
    }
  ]
  ],
  "defaultAllowedOrigins": []
}
247 api/docs/developer/feature-flags.md Normal file
@@ -0,0 +1,247 @@
# Feature Flags

Feature flags allow you to conditionally enable or disable functionality in the Unraid API. This is useful for gradually rolling out new features, A/B testing, or keeping experimental code behind flags during development.

## Setting Up Feature Flags

### 1. Define the Feature Flag

Feature flags are defined as environment variables and collected in `src/consts.ts`:

```typescript
// src/environment.ts
export const ENABLE_MY_NEW_FEATURE = process.env.ENABLE_MY_NEW_FEATURE === 'true';

// src/consts.ts
export const FeatureFlags = Object.freeze({
    ENABLE_NEXT_DOCKER_RELEASE,
    ENABLE_MY_NEW_FEATURE, // Add your new flag here
});
```

### 2. Set the Environment Variable

Set the environment variable when running the API:

```bash
ENABLE_MY_NEW_FEATURE=true unraid-api start
```

Or add it to your `.env` file:

```env
ENABLE_MY_NEW_FEATURE=true
```

## Using Feature Flags in GraphQL

### Method 1: @UseFeatureFlag Decorator (Schema-Level)

The `@UseFeatureFlag` decorator conditionally includes or excludes GraphQL fields, queries, and mutations from the schema based on feature flags. When a feature flag is disabled, the field won't appear in the GraphQL schema at all.

```typescript
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
import { Mutation, Query, ResolveField, Resolver } from '@nestjs/graphql';

@Resolver()
export class MyResolver {
    // Conditionally include a query
    @UseFeatureFlag('ENABLE_MY_NEW_FEATURE')
    @Query(() => String)
    async experimentalQuery() {
        return 'This query only exists when ENABLE_MY_NEW_FEATURE is true';
    }

    // Conditionally include a mutation
    @UseFeatureFlag('ENABLE_MY_NEW_FEATURE')
    @Mutation(() => Boolean)
    async experimentalMutation() {
        return true;
    }

    // Conditionally include a field resolver
    @UseFeatureFlag('ENABLE_MY_NEW_FEATURE')
    @ResolveField(() => String)
    async experimentalField() {
        return 'This field only exists when the flag is enabled';
    }
}
```

**Benefits:**
- Clean schema - disabled features don't appear in GraphQL introspection
- No runtime overhead for disabled features
- Clear feature boundaries

**Use when:**
- You want to completely hide features from the GraphQL schema
- The feature is experimental or in beta
- You're doing a gradual rollout

### Method 2: checkFeatureFlag Function (Runtime)

The `checkFeatureFlag` function provides runtime feature flag checking within resolver methods. It throws a `ForbiddenException` if the feature is disabled.

```typescript
import { checkFeatureFlag } from '@app/unraid-api/utils/feature-flag.helper.js';
import { FeatureFlags } from '@app/consts.js';
import { Args, Query, ResolveField, Resolver } from '@nestjs/graphql';

@Resolver()
export class MyResolver {
    @Query(() => String)
    async myQuery(
        @Args('useNewAlgorithm', { nullable: true }) useNewAlgorithm?: boolean
    ) {
        // Conditionally use new logic based on feature flag
        if (useNewAlgorithm) {
            checkFeatureFlag(FeatureFlags, 'ENABLE_MY_NEW_FEATURE');
            return this.newAlgorithm();
        }

        return this.oldAlgorithm();
    }

    @ResolveField(() => String)
    async dataField() {
        // Check flag at the start of the method
        checkFeatureFlag(FeatureFlags, 'ENABLE_MY_NEW_FEATURE');

        // Feature-specific logic here
        return this.computeExperimentalData();
    }
}
```

**Benefits:**
- More granular control within methods
- Can conditionally execute parts of a method
- Useful for A/B testing scenarios
- Good for gradual migration strategies

**Use when:**
- You need conditional logic within a method
- The field should exist but behavior changes based on the flag
- You're migrating from old to new implementation gradually

## Feature Flag Patterns

### Pattern 1: Complete Feature Toggle

Hide an entire feature behind a flag:

```typescript
@UseFeatureFlag('ENABLE_DOCKER_TEMPLATES')
@Resolver(() => DockerTemplate)
export class DockerTemplateResolver {
    // All resolvers in this class are toggled by the flag
}
```

### Pattern 2: Gradual Migration

Migrate from old to new implementation:

```typescript
@Query(() => [Container])
async getContainers(@Args('version') version?: string) {
    if (version === 'v2') {
        checkFeatureFlag(FeatureFlags, 'ENABLE_CONTAINERS_V2');
        return this.getContainersV2();
    }

    return this.getContainersV1();
}
```

### Pattern 3: Beta Features

Mark features as beta:

```typescript
@UseFeatureFlag('ENABLE_BETA_FEATURES')
@ResolveField(() => BetaMetrics, {
    description: 'BETA: Advanced metrics (requires ENABLE_BETA_FEATURES flag)'
})
async betaMetrics() {
    return this.computeBetaMetrics();
}
```

### Pattern 4: Performance Optimizations

Toggle expensive operations:

```typescript
@ResolveField(() => Statistics)
async statistics() {
    const basicStats = await this.getBasicStats();

    try {
        checkFeatureFlag(FeatureFlags, 'ENABLE_ADVANCED_ANALYTICS');
        const advancedStats = await this.getAdvancedStats();
        return { ...basicStats, ...advancedStats };
    } catch {
        // Feature disabled, return only basic stats
        return basicStats;
    }
}
```

## Testing with Feature Flags

When writing tests for feature-flagged code, create a mock to control feature flag values:

```typescript
import { describe, it, vi } from 'vitest';

// Mock the entire consts module
vi.mock('@app/consts.js', async () => {
    const actual = await vi.importActual('@app/consts.js');
    return {
        ...actual,
        FeatureFlags: {
            ENABLE_MY_NEW_FEATURE: true, // Set your test value
            ENABLE_NEXT_DOCKER_RELEASE: false,
        }
    };
});

describe('MyResolver', () => {
    it('should execute new logic when feature is enabled', async () => {
        // Test new behavior with mocked flag
    });
});
```

## Best Practices

1. **Naming Convention**: Use the `ENABLE_` prefix for boolean feature flags
2. **Environment Variables**: Always use uppercase with underscores
3. **Documentation**: Document what each feature flag controls
4. **Cleanup**: Remove feature flags once features are stable and fully rolled out
5. **Default State**: New features should default to `false` (disabled)
6. **Granularity**: Keep feature flags focused on a single feature or capability
7. **Testing**: Always test both enabled and disabled states

## Common Use Cases

- **Experimental Features**: Hide unstable features in production
- **Gradual Rollouts**: Enable features for specific environments first
- **A/B Testing**: Toggle between different implementations
- **Performance**: Disable expensive operations when not needed
- **Breaking Changes**: Provide a migration path with both old and new behavior
- **Debug Features**: Enable additional logging or debugging tools

## Checking Active Feature Flags

To see which feature flags are currently active:

```typescript
// Log all feature flags on startup
console.log('Active Feature Flags:', FeatureFlags);
```

Or check via GraphQL introspection to see which fields are available based on current flags.
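A hedged sketch of that introspection check (assuming the endpoint below and that introspection is enabled on the server):

```typescript
// List top-level query fields; feature-flagged fields appear only when their flag is on.
const res = await fetch('http://tower.local/graphql', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ query: '{ __schema { queryType { fields { name } } } }' }),
});
const { data } = await res.json();
console.log(data.__schema.queryType.fields.map((f: { name: string }) => f.name));
```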
@@ -21,7 +21,14 @@ unraid-api start [--log-level <level>]
Starts the Unraid API service.

Options:
- `--log-level`: Set logging level (trace|debug|info|warn|error)

- `--log-level`: Set logging level (trace|debug|info|warn|error|fatal)

Alternative: You can also set the log level using the `LOG_LEVEL` environment variable:

```bash
LOG_LEVEL=trace unraid-api start
```

### Stop
@@ -36,11 +43,21 @@ Stops the Unraid API service.
### Restart

```bash
unraid-api restart
unraid-api restart [--log-level <level>]
```

Restarts the Unraid API service.

Options:

- `--log-level`: Set logging level (trace|debug|info|warn|error|fatal)

Alternative: You can also set the log level using the `LOG_LEVEL` environment variable:

```bash
LOG_LEVEL=trace unraid-api restart
```

### Logs

```bash
252 api/docs/public/programmatic-api-key-management.md Normal file
@@ -0,0 +1,252 @@
---
title: Programmatic API Key Management
description: Create, use, and delete API keys programmatically for automated workflows
sidebar_position: 4
---

# Programmatic API Key Management

This guide explains how to create, use, and delete API keys programmatically using the Unraid API CLI, enabling automated workflows and scripts.

## Overview

The `unraid-api apikey` command supports both interactive and non-interactive modes, making it suitable for:

- Automated deployment scripts
- CI/CD pipelines
- Temporary access provisioning
- Infrastructure as code workflows

:::tip[Quick Start]
Jump to the [Complete Workflow Example](#complete-workflow-example) to see everything in action.
:::

## Creating API Keys Programmatically

### Basic Creation with JSON Output

Use the `--json` flag to get machine-readable output:

```bash
unraid-api apikey --create --name "workflow key" --roles ADMIN --json
```

**Output:**

```json
{
  "key": "your-generated-api-key-here",
  "name": "workflow key",
  "id": "generated-uuid"
}
```

### Advanced Creation with Permissions

```bash
unraid-api apikey --create \
  --name "limited access key" \
  --permissions "DOCKER:READ_ANY,ARRAY:READ_ANY" \
  --description "Read-only access for monitoring" \
  --json
```

### Handling Existing Keys

If a key with the same name exists, use `--overwrite`:

```bash
unraid-api apikey --create --name "existing key" --roles ADMIN --overwrite --json
```

:::warning[Key Replacement]
The `--overwrite` flag will permanently replace the existing key. The old key will be immediately invalidated.
:::

## Deleting API Keys Programmatically

### Non-Interactive Deletion

Delete a key by name without prompts:

```bash
unraid-api apikey --delete --name "workflow key"
```

**Output:**

```
Successfully deleted 1 API key
```

### JSON Output for Deletion

Use the `--json` flag for machine-readable delete confirmation:

```bash
unraid-api apikey --delete --name "workflow key" --json
```

**Success Output:**

```json
{
  "deleted": 1,
  "keys": [
    {
      "id": "generated-uuid",
      "name": "workflow key"
    }
  ]
}
```

**Error Output:**

```json
{
  "deleted": 0,
  "error": "No API key found with name: nonexistent key"
}
```

### Error Handling

When the specified key doesn't exist:

```bash
unraid-api apikey --delete --name "nonexistent key"
# Output: No API keys found to delete
```

**JSON Error Output:**

```json
{
  "deleted": 0,
  "message": "No API keys found to delete"
}
```

## Complete Workflow Example

Here's a complete example for temporary access provisioning:

```bash
#!/bin/bash
set -e

# 1. Create temporary API key
echo "Creating temporary API key..."
KEY_DATA=$(unraid-api apikey --create \
  --name "temp deployment key" \
  --roles ADMIN \
  --description "Temporary key for deployment $(date)" \
  --json)

# 2. Register cleanup immediately so it runs even if a later step fails
trap 'echo "Cleaning up..."; unraid-api apikey --delete --name "temp deployment key"' EXIT

# 3. Extract the API key
API_KEY=$(echo "$KEY_DATA" | jq -r '.key')
echo "API key created successfully"

# 4. Use the key for operations
echo "Configuring services..."
curl -H "Authorization: Bearer $API_KEY" \
  -H "Content-Type: application/json" \
  -d '{"provider": "azure", "clientId": "your-client-id"}' \
  http://localhost:3001/graphql

echo "Deployment completed successfully"
```
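The same lifecycle can be driven from Node tooling as well; a minimal TypeScript sketch, assuming `execa` is installed and `unraid-api` is on the PATH (the helper name is illustrative, not part of the CLI):

```typescript
import { execa } from 'execa';

// Create a temporary key, hand it to a task, and guarantee cleanup afterwards.
async function withTemporaryApiKey(task: (apiKey: string) => Promise<void>) {
  const name = 'temp deployment key';
  const { stdout } = await execa('unraid-api', [
    'apikey', '--create', '--name', name, '--roles', 'ADMIN', '--json',
  ]);
  const { key } = JSON.parse(stdout) as { key: string };
  try {
    await task(key);
  } finally {
    // Mirrors the bash `trap ... EXIT` above: cleanup runs even when the task throws.
    await execa('unraid-api', ['apikey', '--delete', '--name', name]);
  }
}
```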
## Command Reference

### Create Command Options

| Flag                    | Description             | Example                           |
| ----------------------- | ----------------------- | --------------------------------- |
| `--name <name>`         | Key name (required)     | `--name "my key"`                 |
| `--roles <roles>`       | Comma-separated roles   | `--roles ADMIN,VIEWER`            |
| `--permissions <perms>` | Resource:action pairs   | `--permissions "DOCKER:READ_ANY"` |
| `--description <desc>`  | Key description         | `--description "CI/CD key"`       |
| `--overwrite`           | Replace existing key    | `--overwrite`                     |
| `--json`                | Machine-readable output | `--json`                          |

### Available Roles

- `ADMIN` - Full system access
- `CONNECT` - Unraid Connect features
- `VIEWER` - Read-only access
- `GUEST` - Limited access

### Available Resources and Actions

**Resources:** `ACTIVATION_CODE`, `API_KEY`, `ARRAY`, `CLOUD`, `CONFIG`, `CONNECT`, `CONNECT__REMOTE_ACCESS`, `CUSTOMIZATIONS`, `DASHBOARD`, `DISK`, `DISPLAY`, `DOCKER`, `FLASH`, `INFO`, `LOGS`, `ME`, `NETWORK`, `NOTIFICATIONS`, `ONLINE`, `OS`, `OWNER`, `PERMISSION`, `REGISTRATION`, `SERVERS`, `SERVICES`, `SHARE`, `VARS`, `VMS`, `WELCOME`

**Actions:** `CREATE_ANY`, `CREATE_OWN`, `READ_ANY`, `READ_OWN`, `UPDATE_ANY`, `UPDATE_OWN`, `DELETE_ANY`, `DELETE_OWN`

### Delete Command Options

| Flag            | Description              | Example           |
| --------------- | ------------------------ | ----------------- |
| `--delete`      | Enable delete mode       | `--delete`        |
| `--name <name>` | Key to delete (optional) | `--name "my key"` |

**Note:** If `--name` is omitted, the command runs interactively.

## Best Practices

:::info[Security Best Practices]
**Minimal Permissions**

- Use specific permissions instead of the ADMIN role when possible
- Example: `--permissions "DOCKER:READ_ANY"` instead of `--roles ADMIN`

**Key Lifecycle Management**

- Always clean up temporary keys after use
- Store API keys securely (environment variables, secrets management)
- Use descriptive names and descriptions for audit trails
:::

### Error Handling

- Check exit codes (`$?`) after each command
- Use `set -e` in bash scripts to fail fast
- Implement proper cleanup with `trap`

### Key Naming

- Use descriptive names that include purpose and date
- Names must contain only letters, numbers, and spaces
- Unicode letters are supported

## Troubleshooting

### Common Issues

:::note[Common Error Messages]

**"API key name must contain only letters, numbers, and spaces"**

- **Solution:** Remove special characters like hyphens, underscores, or symbols

**"API key with name 'x' already exists"**

- **Solution:** Use the `--overwrite` flag or choose a different name

**"Please add at least one role or permission to the key"**

- **Solution:** Specify either `--roles` or `--permissions` (or both)

:::

### Debug Mode

For troubleshooting, run with debug logging:

```bash
LOG_LEVEL=debug unraid-api apikey --create --name "debug key" --roles ADMIN
```
@@ -13,7 +13,9 @@
      "watch": false,
      "interpreter": "/usr/local/bin/node",
      "ignore_watch": ["node_modules", "src", ".env.*", "myservers.cfg"],
      "log_file": "/var/log/graphql-api.log",
      "out_file": "/var/log/graphql-api.log",
      "error_file": "/var/log/graphql-api.log",
      "merge_logs": true,
      "kill_timeout": 10000
    }
  ]
@@ -139,6 +139,9 @@ type ArrayDisk implements Node {
  """ata | nvme | usb | (others)"""
  transport: String
  color: ArrayDiskFsColor

  """Whether the disk is currently spinning"""
  isSpinning: Boolean
}

interface Node {
@@ -346,6 +349,9 @@ type Disk implements Node {

  """The partitions on the disk"""
  partitions: [DiskPartition!]!

  """Whether the disk is spinning or not"""
  isSpinning: Boolean!
}

"""The type of interface the disk uses to connect to the system"""
@@ -1044,6 +1050,19 @@ enum ThemeName {
  white
}

type ExplicitStatusItem {
  name: String!
  updateStatus: UpdateStatus!
}

"""Update status of a container."""
enum UpdateStatus {
  UP_TO_DATE
  UPDATE_AVAILABLE
  REBUILD_READY
  UNKNOWN
}

type ContainerPort {
  ip: String
  privatePort: Port
@@ -1083,6 +1102,8 @@ type DockerContainer implements Node {
  networkSettings: JSON
  mounts: [JSON!]
  autoStart: Boolean!
  isUpdateAvailable: Boolean
  isRebuildReady: Boolean
}

enum ContainerState {
@@ -1113,6 +1134,7 @@ type Docker implements Node {
  containers(skipCache: Boolean! = false): [DockerContainer!]!
  networks(skipCache: Boolean! = false): [DockerNetwork!]!
  organizer: ResolvedOrganizerV1!
  containerUpdateStatuses: [ExplicitStatusItem!]!
}

type ResolvedOrganizerView {
@@ -1361,6 +1383,12 @@ type CpuLoad {

  """The percentage of time the CPU spent servicing hardware interrupts."""
  percentIrq: Float!

  """The percentage of time the CPU spent running virtual machines (guest)."""
  percentGuest: Float!

  """The percentage of CPU time stolen by the hypervisor."""
  percentSteal: Float!
}

type CpuUtilization implements Node {
@@ -1798,6 +1826,8 @@ type Server implements Node {
  guid: String!
  apikey: String!
  name: String!

  """Whether this server is online or offline"""
  status: ServerStatus!
  wanip: String!
  lanip: String!
@@ -1854,7 +1884,7 @@ type OidcProvider {
  """
  OIDC issuer URL (e.g., https://accounts.google.com). Required for auto-discovery via /.well-known/openid-configuration
  """
  issuer: String!
  issuer: String

  """
  OAuth2 authorization endpoint URL. If omitted, will be auto-discovered from issuer/.well-known/openid-configuration
@@ -1907,6 +1937,16 @@ enum AuthorizationRuleMode {
  AND
}

type OidcConfiguration {
  """List of configured OIDC providers"""
  providers: [OidcProvider!]!

  """
  Default allowed redirect origins that apply to all OIDC providers (e.g., Tailscale domains)
  """
  defaultAllowedOrigins: [String!]
}

type OidcSessionValidation {
  valid: Boolean!
  username: String
@@ -2307,8 +2347,6 @@ type Query {
  getApiKeyCreationFormSchema: ApiKeyFormSettings!
  config: Config!
  flash: Flash!
  logFiles: [LogFile!]!
  logFile(path: String!, lines: Int, startLine: Int): LogFileContent!
  me: UserAccount!

  """Get all notifications"""
@@ -2335,6 +2373,8 @@ type Query {
  disk(id: PrefixedID!): Disk!
  rclone: RCloneBackupSettings!
  info: Info!
  logFiles: [LogFile!]!
  logFile(path: String!, lines: Int, startLine: Int): LogFileContent!
  settings: Settings!
  isSSOEnabled: Boolean!

@@ -2347,6 +2387,9 @@ type Query {
  """Get a specific OIDC provider by ID"""
  oidcProvider(id: PrefixedID!): OidcProvider

  """Get the full OIDC configuration (admin only)"""
  oidcConfiguration: OidcConfiguration!

  """Validate an OIDC session token (internal use for CLI validation)"""
  validateOidcSession(token: String!): OidcSessionValidation!
  metrics: Metrics!
@@ -2392,6 +2435,7 @@ type Mutation {
  setDockerFolderChildren(folderId: String, childrenIds: [String!]!): ResolvedOrganizerV1!
  deleteDockerEntries(entryIds: [String!]!): ResolvedOrganizerV1!
  moveDockerEntriesToFolder(sourceEntryIds: [String!]!, destinationFolderId: String!): ResolvedOrganizerV1!
  refreshDockerDigests: Boolean!

  """Initiates a flash drive backup using a configured remote."""
  initiateFlashBackup(input: InitiateFlashBackupInput!): FlashBackupStatus!
@@ -2590,13 +2634,13 @@ input AccessUrlInput {
}

type Subscription {
  logFile(path: String!): LogFileContent!
  notificationAdded: Notification!
  notificationsOverview: NotificationOverview!
  ownerSubscription: Owner!
  serversSubscription: Server!
  parityHistorySubscription: ParityCheck!
  arraySubscription: UnraidArray!
  logFile(path: String!): LogFileContent!
  systemMetricsCpu: CpuUtilization!
  systemMetricsMemory: MemoryUtilization!
  upsUpdates: UPSDevice!
@@ -1,6 +1,6 @@
{
  "name": "@unraid/api",
  "version": "4.16.0",
  "version": "4.22.2",
  "main": "src/cli/index.ts",
  "type": "module",
  "corepack": {
@@ -56,7 +56,7 @@
    "@as-integrations/fastify": "2.1.1",
    "@fastify/cookie": "11.0.2",
    "@fastify/helmet": "13.0.1",
    "@graphql-codegen/client-preset": "4.8.3",
    "@graphql-codegen/client-preset": "5.0.0",
    "@graphql-tools/load-files": "7.0.1",
    "@graphql-tools/merge": "9.1.1",
    "@graphql-tools/schema": "10.0.25",
@@ -84,7 +84,7 @@
    "bytes": "3.1.2",
    "cache-manager": "7.2.0",
    "cacheable-lookup": "7.0.0",
    "camelcase-keys": "9.1.3",
    "camelcase-keys": "10.0.0",
    "casbin": "5.38.0",
    "change-case": "5.4.4",
    "chokidar": "4.0.3",
@@ -94,15 +94,16 @@
    "command-exists": "1.2.9",
    "convert": "5.12.0",
    "cookie": "1.0.2",
    "cron": "4.3.3",
    "cron": "4.3.0",
    "cross-fetch": "4.1.0",
    "diff": "8.0.2",
    "dockerode": "4.0.7",
    "dotenv": "17.2.1",
    "escape-html": "1.0.3",
    "execa": "9.6.0",
    "exit-hook": "4.0.0",
    "fastify": "5.5.0",
    "filenamify": "6.0.0",
    "filenamify": "7.0.0",
    "fs-extra": "11.3.1",
    "glob": "11.0.3",
    "global-agent": "3.0.0",
@@ -126,7 +127,7 @@
    "node-cache": "5.1.2",
    "node-window-polyfill": "1.0.4",
    "openid-client": "6.6.4",
    "p-retry": "6.2.1",
    "p-retry": "7.0.0",
    "passport-custom": "1.1.1",
    "passport-http-header-strategy": "1.1.0",
    "path-type": "6.0.0",
@@ -140,7 +141,7 @@
    "strftime": "0.10.3",
    "systeminformation": "5.27.8",
    "undici": "7.15.0",
    "uuid": "11.1.0",
    "uuid": "13.0.0",
    "ws": "8.18.3",
    "zen-observable-ts": "1.1.0",
    "zod": "3.25.76"
@@ -155,14 +156,14 @@
  },
  "devDependencies": {
    "@eslint/js": "9.34.0",
    "@graphql-codegen/add": "5.0.3",
    "@graphql-codegen/cli": "5.0.7",
    "@graphql-codegen/fragment-matcher": "5.1.0",
    "@graphql-codegen/add": "6.0.0",
    "@graphql-codegen/cli": "6.0.0",
    "@graphql-codegen/fragment-matcher": "6.0.0",
    "@graphql-codegen/import-types-preset": "3.0.1",
    "@graphql-codegen/typed-document-node": "5.1.2",
    "@graphql-codegen/typescript": "4.1.6",
    "@graphql-codegen/typescript-operations": "4.6.1",
    "@graphql-codegen/typescript-resolvers": "4.5.1",
    "@graphql-codegen/typed-document-node": "6.0.0",
    "@graphql-codegen/typescript": "5.0.0",
    "@graphql-codegen/typescript-operations": "5.0.0",
    "@graphql-codegen/typescript-resolvers": "5.0.0",
    "@graphql-typed-document-node/core": "3.2.0",
    "@ianvs/prettier-plugin-sort-imports": "4.6.3",
    "@nestjs/testing": "11.1.6",
@@ -204,7 +205,7 @@
    "rollup-plugin-node-externals": "8.1.0",
    "supertest": "7.1.4",
    "tsx": "4.20.5",
    "type-fest": "4.41.0",
    "type-fest": "5.0.0",
    "typescript": "5.9.2",
    "typescript-eslint": "8.41.0",
    "unplugin-swc": "1.5.7",
@@ -1,11 +1,12 @@
|
||||
import { expect, test } from 'vitest';
|
||||
import { expect, test, vi } from 'vitest';
|
||||
|
||||
import { store } from '@app/store/index.js';
|
||||
import { FileLoadStatus, StateFileKey } from '@app/store/types.js';
|
||||
|
||||
import '@app/core/utils/misc/get-key-file.js';
|
||||
import '@app/store/modules/emhttp.js';
|
||||
|
||||
vi.mock('fs/promises');
|
||||
|
||||
test('Before loading key returns null', async () => {
|
||||
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
|
||||
const { status } = store.getState().registration;
|
||||
@@ -48,21 +49,70 @@ test('Returns empty key if key location is empty', async () => {
|
||||
await expect(getKeyFile()).resolves.toBe('');
|
||||
});
|
||||
|
||||
test(
|
||||
'Returns decoded key file if key location exists',
|
||||
async () => {
|
||||
        const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
        const { loadStateFiles } = await import('@app/store/modules/emhttp.js');
        const { loadRegistrationKey } = await import('@app/store/modules/registration.js');
        // Load state files into store
        await store.dispatch(loadStateFiles());
        await store.dispatch(loadRegistrationKey());
        // Check if store has state files loaded
        const { status } = store.getState().registration;
        expect(status).toBe(FileLoadStatus.LOADED);
        await expect(getKeyFile()).resolves.toMatchInlineSnapshot(
            '"hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w"'
        );
    },
    { timeout: 10000 }
);

test('Returns empty string when key file does not exist (ENOENT)', async () => {
    const { readFile } = await import('fs/promises');

    // Mock readFile to throw ENOENT error
    const readFileMock = vi.mocked(readFile);
    readFileMock.mockRejectedValueOnce(
        Object.assign(new Error('ENOENT: no such file or directory'), { code: 'ENOENT' })
    );

    // Clear the module cache and re-import to get fresh module with mock
    vi.resetModules();
    const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
    const { updateEmhttpState } = await import('@app/store/modules/emhttp.js');
    const { store: freshStore } = await import('@app/store/index.js');

    // Set key file location to a non-existent file
    freshStore.dispatch(
        updateEmhttpState({
            field: StateFileKey.var,
            state: {
                regFile: '/boot/config/Pro.key',
            },
        })
    );

    // Should return empty string when file doesn't exist
    await expect(getKeyFile()).resolves.toBe('');

    // Clear mock
    readFileMock.mockReset();
    vi.resetModules();
});

test('Returns decoded key file if key location exists', async () => {
    const { readFile } = await import('fs/promises');

    // Mock a valid key file content
    const mockKeyContent =
        'hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w==';
    const binaryContent = Buffer.from(mockKeyContent, 'base64').toString('binary');

    const readFileMock = vi.mocked(readFile);
    readFileMock.mockResolvedValue(binaryContent);

    // Clear the module cache and re-import to get fresh module with mock
    vi.resetModules();
    const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
    const { loadStateFiles } = await import('@app/store/modules/emhttp.js');
    const { loadRegistrationKey } = await import('@app/store/modules/registration.js');
    const { store: freshStore } = await import('@app/store/index.js');

    // Load state files into store
    await freshStore.dispatch(loadStateFiles());
    await freshStore.dispatch(loadRegistrationKey());
    // Check if store has state files loaded
    const { status } = freshStore.getState().registration;
    expect(status).toBe(FileLoadStatus.LOADED);

    const result = await getKeyFile();
    expect(result).toBe(
        'hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w'
    );

    // Clear mock
    readFileMock.mockReset();
    vi.resetModules();
}, 10000);

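A minimal sketch of the transform these tests exercise, mirroring the getKeyFile change later in this diff (the standalone helper name is illustrative):

import { readFile } from 'node:fs/promises';

// Read a binary key file and re-encode it as a base64url-style string
// (no '+', '/', or '=' characters), as asserted by the snapshots above.
async function encodeKeyFile(path: string): Promise<string> {
    const raw = await readFile(path, 'binary');
    return Buffer.from(raw, 'binary')
        .toString('base64')
        .trim()
        .replace(/\+/g, '-')
        .replace(/\//g, '_')
        .replace(/=/g, '');
}
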
@@ -1,10 +1,11 @@
import { existsSync } from 'node:fs';
import { homedir } from 'node:os';
import { join } from 'node:path';
import { fileURLToPath } from 'node:url';

import { execa } from 'execa';
import pm2 from 'pm2';
import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from 'vitest';
import { afterAll, afterEach, beforeAll, describe, expect, it } from 'vitest';

import { isUnraidApiRunning } from '@app/core/utils/pm2/unraid-api-running.js';

@@ -17,11 +18,6 @@ const TEST_PROCESS_NAME = 'test-unraid-api';
// Shared PM2 connection state
let pm2Connected = false;

// Helper function to run CLI command (assumes CLI is built)
async function runCliCommand(command: string, options: any = {}) {
    return await execa('node', [CLI_PATH, command], options);
}

// Helper to ensure PM2 connection is established
async function ensurePM2Connection() {
    if (pm2Connected) return;
@@ -57,7 +53,7 @@ async function deleteTestProcesses() {
        }

        const processName = processNames[deletedCount];
        pm2.delete(processName, (deleteErr) => {
        pm2.delete(processName, () => {
            // Ignore errors, process might not exist
            deletedCount++;
            deleteNext();
@@ -92,7 +88,7 @@ async function cleanupAllPM2Processes() {
        }

        // Kill the daemon to ensure fresh state
        pm2.killDaemon((killErr) => {
        pm2.killDaemon(() => {
            pm2.disconnect();
            pm2Connected = false;
            // Small delay to let PM2 fully shutdown
@@ -104,6 +100,9 @@ async function cleanupAllPM2Processes() {

describe.skipIf(!!process.env.CI)('PM2 integration tests', () => {
    beforeAll(async () => {
        // Set PM2_HOME to use home directory for testing (not /var/log)
        process.env.PM2_HOME = join(homedir(), '.pm2');

        // Build the CLI if it doesn't exist (only for CLI tests)
        if (!existsSync(CLI_PATH)) {
            console.log('Building CLI for integration tests...');
@@ -198,6 +197,13 @@ describe.skipIf(!!process.env.CI)('PM2 integration tests', () => {
    }, 30000);

    it('should handle PM2 connection errors gracefully', async () => {
        // Disconnect PM2 first to ensure we're testing fresh connection
        await new Promise<void>((resolve) => {
            pm2.disconnect();
            pm2Connected = false;
            setTimeout(resolve, 100);
        });

        // Set an invalid PM2_HOME to force connection failure
        const originalPM2Home = process.env.PM2_HOME;
        process.env.PM2_HOME = '/invalid/path/that/does/not/exist';

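The hunk above cuts off before the body of ensurePM2Connection; a plausible sketch of the pattern it names, wrapping PM2's callback API in a promise (assumed shape, not the verbatim implementation):

import pm2 from 'pm2';

let pm2Connected = false;

async function ensurePM2Connection(): Promise<void> {
    if (pm2Connected) return;
    // pm2.connect takes a Node-style callback; promisify it by hand.
    await new Promise<void>((resolve, reject) => {
        pm2.connect((err) => (err ? reject(err) : resolve()));
    });
    pm2Connected = true;
}
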
@@ -12,7 +12,22 @@ import {
    UpdateRCloneRemoteDto,
} from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';

vi.mock('got');
vi.mock('got', () => {
    const mockPost = vi.fn();
    const gotMock = {
        post: mockPost,
    };
    return {
        default: gotMock,
        HTTPError: class HTTPError extends Error {
            response?: any;
            constructor(response?: any) {
                super('HTTP Error');
                this.response = response;
            }
        },
    };
});
vi.mock('execa');
vi.mock('p-retry');
vi.mock('node:fs', () => ({
@@ -60,7 +75,7 @@ vi.mock('@nestjs/common', async (importOriginal) => {

describe('RCloneApiService', () => {
    let service: RCloneApiService;
    let mockGot: any;
    let mockGotPost: any;
    let mockExeca: any;
    let mockPRetry: any;
    let mockExistsSync: any;
@@ -68,19 +83,19 @@ describe('RCloneApiService', () => {
    beforeEach(async () => {
        vi.clearAllMocks();

        const { default: got } = await import('got');
        const got = await import('got');
        const { execa } = await import('execa');
        const pRetry = await import('p-retry');
        const { existsSync } = await import('node:fs');
        const { fileExists } = await import('@app/core/utils/files/file-exists.js');

        mockGot = vi.mocked(got);
        mockGotPost = vi.mocked(got.default.post);
        mockExeca = vi.mocked(execa);
        mockPRetry = vi.mocked(pRetry.default);
        mockExistsSync = vi.mocked(existsSync);

        // Mock successful RClone API response for socket check
        mockGot.post = vi.fn().mockResolvedValue({ body: { pid: 12345 } });
        mockGotPost.mockResolvedValue({ body: { pid: 12345 } });

        // Mock RClone binary exists check
        vi.mocked(fileExists).mockResolvedValue(true);
@@ -97,10 +112,10 @@ describe('RCloneApiService', () => {
        mockPRetry.mockResolvedValue(undefined);

        service = new RCloneApiService();
        await service.onModuleInit();
        await service.onApplicationBootstrap();

        // Reset the mock after initialization to prepare for test-specific responses
        mockGot.post.mockClear();
        mockGotPost.mockClear();
    });

    describe('getProviders', () => {
@@ -109,15 +124,15 @@ describe('RCloneApiService', () => {
                { name: 'aws', prefix: 's3', description: 'Amazon S3' },
                { name: 'google', prefix: 'drive', description: 'Google Drive' },
            ];
            mockGot.post.mockResolvedValue({
            mockGotPost.mockResolvedValue({
                body: { providers: mockProviders },
            });

            const result = await service.getProviders();

            expect(result).toEqual(mockProviders);
            expect(mockGot.post).toHaveBeenCalledWith(
                'http://unix:/tmp/rclone.sock:/config/providers',
            expect(mockGotPost).toHaveBeenCalledWith(
                expect.stringMatching(/\/config\/providers$/),
                expect.objectContaining({
                    json: {},
                    responseType: 'json',
@@ -130,7 +145,7 @@ describe('RCloneApiService', () => {
        });

        it('should return empty array when no providers', async () => {
            mockGot.post.mockResolvedValue({ body: {} });
            mockGotPost.mockResolvedValue({ body: {} });

            const result = await service.getProviders();

@@ -141,15 +156,15 @@ describe('RCloneApiService', () => {
    describe('listRemotes', () => {
        it('should return list of remotes', async () => {
            const mockRemotes = ['backup-s3', 'drive-storage'];
            mockGot.post.mockResolvedValue({
            mockGotPost.mockResolvedValue({
                body: { remotes: mockRemotes },
            });

            const result = await service.listRemotes();

            expect(result).toEqual(mockRemotes);
            expect(mockGot.post).toHaveBeenCalledWith(
                'http://unix:/tmp/rclone.sock:/config/listremotes',
            expect(mockGotPost).toHaveBeenCalledWith(
                expect.stringMatching(/\/config\/listremotes$/),
                expect.objectContaining({
                    json: {},
                    responseType: 'json',
@@ -162,7 +177,7 @@ describe('RCloneApiService', () => {
        });

        it('should return empty array when no remotes', async () => {
            mockGot.post.mockResolvedValue({ body: {} });
            mockGotPost.mockResolvedValue({ body: {} });

            const result = await service.listRemotes();

@@ -174,13 +189,13 @@ describe('RCloneApiService', () => {
        it('should return remote details', async () => {
            const input: GetRCloneRemoteDetailsDto = { name: 'test-remote' };
            const mockConfig = { type: 's3', provider: 'AWS' };
            mockGot.post.mockResolvedValue({ body: mockConfig });
            mockGotPost.mockResolvedValue({ body: mockConfig });

            const result = await service.getRemoteDetails(input);

            expect(result).toEqual(mockConfig);
            expect(mockGot.post).toHaveBeenCalledWith(
                'http://unix:/tmp/rclone.sock:/config/get',
            expect(mockGotPost).toHaveBeenCalledWith(
                expect.stringMatching(/\/config\/get$/),
                expect.objectContaining({
                    json: { name: 'test-remote' },
                    responseType: 'json',
@@ -197,7 +212,7 @@ describe('RCloneApiService', () => {
        it('should return remote configuration', async () => {
            const input: GetRCloneRemoteConfigDto = { name: 'test-remote' };
            const mockConfig = { type: 's3', access_key_id: 'AKIA...' };
            mockGot.post.mockResolvedValue({ body: mockConfig });
            mockGotPost.mockResolvedValue({ body: mockConfig });

            const result = await service.getRemoteConfig(input);

@@ -213,13 +228,13 @@ describe('RCloneApiService', () => {
                parameters: { access_key_id: 'AKIA...', secret_access_key: 'secret' },
            };
            const mockResponse = { success: true };
            mockGot.post.mockResolvedValue({ body: mockResponse });
            mockGotPost.mockResolvedValue({ body: mockResponse });

            const result = await service.createRemote(input);

            expect(result).toEqual(mockResponse);
            expect(mockGot.post).toHaveBeenCalledWith(
                'http://unix:/tmp/rclone.sock:/config/create',
            expect(mockGotPost).toHaveBeenCalledWith(
                expect.stringMatching(/\/config\/create$/),
                expect.objectContaining({
                    json: {
                        name: 'new-remote',
@@ -243,13 +258,13 @@ describe('RCloneApiService', () => {
                parameters: { access_key_id: 'NEW_AKIA...' },
            };
            const mockResponse = { success: true };
            mockGot.post.mockResolvedValue({ body: mockResponse });
            mockGotPost.mockResolvedValue({ body: mockResponse });

            const result = await service.updateRemote(input);

            expect(result).toEqual(mockResponse);
            expect(mockGot.post).toHaveBeenCalledWith(
                'http://unix:/tmp/rclone.sock:/config/update',
            expect(mockGotPost).toHaveBeenCalledWith(
                expect.stringMatching(/\/config\/update$/),
                expect.objectContaining({
                    json: {
                        name: 'existing-remote',
@@ -269,13 +284,13 @@ describe('RCloneApiService', () => {
        it('should delete a remote', async () => {
            const input: DeleteRCloneRemoteDto = { name: 'remote-to-delete' };
            const mockResponse = { success: true };
            mockGot.post.mockResolvedValue({ body: mockResponse });
            mockGotPost.mockResolvedValue({ body: mockResponse });

            const result = await service.deleteRemote(input);

            expect(result).toEqual(mockResponse);
            expect(mockGot.post).toHaveBeenCalledWith(
                'http://unix:/tmp/rclone.sock:/config/delete',
            expect(mockGotPost).toHaveBeenCalledWith(
                expect.stringMatching(/\/config\/delete$/),
                expect.objectContaining({
                    json: { name: 'remote-to-delete' },
                    responseType: 'json',
@@ -296,13 +311,13 @@ describe('RCloneApiService', () => {
                options: { delete_on: 'dst' },
            };
            const mockResponse = { jobid: 'job-123' };
            mockGot.post.mockResolvedValue({ body: mockResponse });
            mockGotPost.mockResolvedValue({ body: mockResponse });

            const result = await service.startBackup(input);

            expect(result).toEqual(mockResponse);
            expect(mockGot.post).toHaveBeenCalledWith(
                'http://unix:/tmp/rclone.sock:/sync/copy',
            expect(mockGotPost).toHaveBeenCalledWith(
                expect.stringMatching(/\/sync\/copy$/),
                expect.objectContaining({
                    json: {
                        srcFs: '/source/path',
@@ -323,13 +338,13 @@ describe('RCloneApiService', () => {
        it('should return job status', async () => {
            const input: GetRCloneJobStatusDto = { jobId: 'job-123' };
            const mockStatus = { status: 'running', progress: 0.5 };
            mockGot.post.mockResolvedValue({ body: mockStatus });
            mockGotPost.mockResolvedValue({ body: mockStatus });

            const result = await service.getJobStatus(input);

            expect(result).toEqual(mockStatus);
            expect(mockGot.post).toHaveBeenCalledWith(
                'http://unix:/tmp/rclone.sock:/job/status',
            expect(mockGotPost).toHaveBeenCalledWith(
                expect.stringMatching(/\/job\/status$/),
                expect.objectContaining({
                    json: { jobid: 'job-123' },
                    responseType: 'json',
@@ -348,13 +363,13 @@ describe('RCloneApiService', () => {
                { id: 'job-1', status: 'running' },
                { id: 'job-2', status: 'finished' },
            ];
            mockGot.post.mockResolvedValue({ body: mockJobs });
            mockGotPost.mockResolvedValue({ body: mockJobs });

            const result = await service.listRunningJobs();

            expect(result).toEqual(mockJobs);
            expect(mockGot.post).toHaveBeenCalledWith(
                'http://unix:/tmp/rclone.sock:/job/list',
            expect(mockGotPost).toHaveBeenCalledWith(
                expect.stringMatching(/\/job\/list$/),
                expect.objectContaining({
                    json: {},
                    responseType: 'json',
@@ -378,7 +393,7 @@ describe('RCloneApiService', () => {
                },
            };
            Object.setPrototypeOf(httpError, HTTPError.prototype);
            mockGot.post.mockRejectedValue(httpError);
            mockGotPost.mockRejectedValue(httpError);

            await expect(service.getProviders()).rejects.toThrow(
                'Rclone API Error (config/providers, HTTP 500): Rclone Error: Internal server error'
@@ -395,7 +410,7 @@ describe('RCloneApiService', () => {
                },
            };
            Object.setPrototypeOf(httpError, HTTPError.prototype);
            mockGot.post.mockRejectedValue(httpError);
            mockGotPost.mockRejectedValue(httpError);

            await expect(service.getProviders()).rejects.toThrow(
                'Rclone API Error (config/providers, HTTP 404): Failed to process error response body. Raw body:'
@@ -412,7 +427,7 @@ describe('RCloneApiService', () => {
                },
            };
            Object.setPrototypeOf(httpError, HTTPError.prototype);
            mockGot.post.mockRejectedValue(httpError);
            mockGotPost.mockRejectedValue(httpError);

            await expect(service.getProviders()).rejects.toThrow(
                'Rclone API Error (config/providers, HTTP 400): Failed to process error response body. Raw body: invalid json'
@@ -421,17 +436,108 @@ describe('RCloneApiService', () => {

        it('should handle non-HTTP errors', async () => {
            const networkError = new Error('Network connection failed');
            mockGot.post.mockRejectedValue(networkError);
            mockGotPost.mockRejectedValue(networkError);

            await expect(service.getProviders()).rejects.toThrow('Network connection failed');
        });

        it('should handle unknown errors', async () => {
            mockGot.post.mockRejectedValue('unknown error');
            mockGotPost.mockRejectedValue('unknown error');

            await expect(service.getProviders()).rejects.toThrow(
                'Unknown error calling RClone API (config/providers) with params {}: unknown error'
            );
        });
    });

    describe('checkRcloneBinaryExists', () => {
        beforeEach(() => {
            // Create a new service instance without initializing for these tests
            service = new RCloneApiService();
        });

        it('should return true when rclone version is 1.70.0', async () => {
            mockExeca.mockResolvedValueOnce({
                stdout: 'rclone v1.70.0\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
                stderr: '',
            } as any);

            const result = await (service as any).checkRcloneBinaryExists();

            expect(result).toBe(true);
        });

        it('should return true when rclone version is newer than 1.70.0', async () => {
            mockExeca.mockResolvedValueOnce({
                stdout: 'rclone v1.75.2\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
                stderr: '',
            } as any);

            const result = await (service as any).checkRcloneBinaryExists();

            expect(result).toBe(true);
        });

        it('should return false when rclone version is older than 1.70.0', async () => {
            mockExeca.mockResolvedValueOnce({
                stdout: 'rclone v1.69.0\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
                stderr: '',
            } as any);

            const result = await (service as any).checkRcloneBinaryExists();

            expect(result).toBe(false);
        });

        it('should return false when rclone version is much older', async () => {
            mockExeca.mockResolvedValueOnce({
                stdout: 'rclone v1.50.0\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
                stderr: '',
            } as any);

            const result = await (service as any).checkRcloneBinaryExists();

            expect(result).toBe(false);
        });

        it('should return false when version cannot be parsed', async () => {
            mockExeca.mockResolvedValueOnce({
                stdout: 'rclone unknown version format',
                stderr: '',
            } as any);

            const result = await (service as any).checkRcloneBinaryExists();

            expect(result).toBe(false);
        });

        it('should return false when rclone binary is not found', async () => {
            const error = new Error('Command not found') as any;
            error.code = 'ENOENT';
            mockExeca.mockRejectedValueOnce(error);

            const result = await (service as any).checkRcloneBinaryExists();

            expect(result).toBe(false);
        });

        it('should return false and log error for other exceptions', async () => {
            mockExeca.mockRejectedValueOnce(new Error('Some other error'));

            const result = await (service as any).checkRcloneBinaryExists();

            expect(result).toBe(false);
        });

        it('should handle beta/rc versions correctly', async () => {
            mockExeca.mockResolvedValueOnce({
                stdout: 'rclone v1.70.0-beta.1\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
                stderr: '',
            } as any);

            const result = await (service as any).checkRcloneBinaryExists();

            expect(result).toBe(true);
        });
    });
});

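The recurring assertion change above swaps a hard-coded unix-socket URL for a suffix match, so the tests survive a relocated socket; an illustrative before/after:

const options = expect.objectContaining({ json: {}, responseType: 'json' });
// Brittle: breaks whenever the socket path changes.
expect(mockGotPost).toHaveBeenCalledWith('http://unix:/tmp/rclone.sock:/config/providers', options);
// Robust: pins only the RClone RC endpoint, not the transport.
expect(mockGotPost).toHaveBeenCalledWith(expect.stringMatching(/\/config\/providers$/), options);
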
@@ -211,6 +211,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
          "fsUsed": null,
          "id": "ST18000NM000J-2TV103_ZR585CPY",
          "idx": 0,
          "isSpinning": true,
          "name": "parity",
          "numErrors": 0,
          "numReads": 0,
@@ -235,6 +236,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
          "fsUsed": 4116003021,
          "id": "ST18000NM000J-2TV103_ZR5B1W9X",
          "idx": 1,
          "isSpinning": true,
          "name": "disk1",
          "numErrors": 0,
          "numReads": 0,
@@ -259,6 +261,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
          "fsUsed": 11904860828,
          "id": "WDC_WD120EDAZ-11F3RA0_5PJRD45C",
          "idx": 2,
          "isSpinning": true,
          "name": "disk2",
          "numErrors": 0,
          "numReads": 0,
@@ -283,6 +286,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
          "fsUsed": 6478056481,
          "id": "WDC_WD120EMAZ-11BLFA0_5PH8BTYD",
          "idx": 3,
          "isSpinning": true,
          "name": "disk3",
          "numErrors": 0,
          "numReads": 0,
@@ -307,6 +311,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
          "fsUsed": 137273827,
          "id": "Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z",
          "idx": 30,
          "isSpinning": true,
          "name": "cache",
          "numErrors": 0,
          "numReads": 0,
@@ -331,6 +336,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
          "fsUsed": null,
          "id": "KINGSTON_SA2000M8250G_50026B7282669D9E",
          "idx": 31,
          "isSpinning": true,
          "name": "cache2",
          "numErrors": 0,
          "numReads": 0,
@@ -355,6 +361,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
          "fsUsed": 851325,
          "id": "Cruzer",
          "idx": 32,
          "isSpinning": true,
          "name": "flash",
          "numErrors": 0,
          "numReads": 0,

@@ -28,6 +28,7 @@ test('Returns parsed state file', async () => {
          "fsUsed": null,
          "id": "ST18000NM000J-2TV103_ZR585CPY",
          "idx": 0,
          "isSpinning": true,
          "name": "parity",
          "numErrors": 0,
          "numReads": 0,
@@ -52,6 +53,7 @@ test('Returns parsed state file', async () => {
          "fsUsed": 4116003021,
          "id": "ST18000NM000J-2TV103_ZR5B1W9X",
          "idx": 1,
          "isSpinning": true,
          "name": "disk1",
          "numErrors": 0,
          "numReads": 0,
@@ -76,6 +78,7 @@ test('Returns parsed state file', async () => {
          "fsUsed": 11904860828,
          "id": "WDC_WD120EDAZ-11F3RA0_5PJRD45C",
          "idx": 2,
          "isSpinning": true,
          "name": "disk2",
          "numErrors": 0,
          "numReads": 0,
@@ -100,6 +103,7 @@ test('Returns parsed state file', async () => {
          "fsUsed": 6478056481,
          "id": "WDC_WD120EMAZ-11BLFA0_5PH8BTYD",
          "idx": 3,
          "isSpinning": true,
          "name": "disk3",
          "numErrors": 0,
          "numReads": 0,
@@ -124,6 +128,7 @@ test('Returns parsed state file', async () => {
          "fsUsed": 137273827,
          "id": "Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z",
          "idx": 30,
          "isSpinning": true,
          "name": "cache",
          "numErrors": 0,
          "numReads": 0,
@@ -148,6 +153,7 @@ test('Returns parsed state file', async () => {
          "fsUsed": null,
          "id": "KINGSTON_SA2000M8250G_50026B7282669D9E",
          "idx": 31,
          "isSpinning": true,
          "name": "cache2",
          "numErrors": 0,
          "numReads": 0,
@@ -172,6 +178,7 @@ test('Returns parsed state file', async () => {
          "fsUsed": 851325,
          "id": "Cruzer",
          "idx": 32,
          "isSpinning": true,
          "name": "flash",
          "numErrors": 0,
          "numReads": 0,

@@ -2,7 +2,7 @@ import { join } from 'path';

import type { JSONWebKeySet } from 'jose';

import { PORT } from '@app/environment.js';
import { ENABLE_NEXT_DOCKER_RELEASE, PORT } from '@app/environment.js';

export const getInternalApiAddress = (isHttp = true, nginxPort = 80) => {
    const envPort = PORT;
@@ -79,3 +79,14 @@ export const KEYSERVER_VALIDATION_ENDPOINT = 'https://keys.lime-technology.com/v

/** Set the max retries for the GraphQL Client */
export const MAX_RETRIES_FOR_LINEAR_BACKOFF = 100;

/**
 * Feature flags are used to conditionally enable or disable functionality in the Unraid API.
 *
 * Keys are human readable feature flag names -- will be used to construct error messages.
 *
 * Values are boolean/truthy values.
 */
export const FeatureFlags = Object.freeze({
    ENABLE_NEXT_DOCKER_RELEASE,
});

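A sketch of how a frozen flag map like this can gate functionality; assertFeatureEnabled is hypothetical and not part of this diff:

// Hypothetical guard built on FeatureFlags; the human-readable key doubles as error text,
// per the doc comment above.
export function assertFeatureEnabled(flag: keyof typeof FeatureFlags): void {
    if (!FeatureFlags[flag]) {
        throw new Error(`Feature is not enabled: ${String(flag)}`);
    }
}

// usage: assertFeatureEnabled('ENABLE_NEXT_DOCKER_RELEASE');
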
@@ -29,8 +29,24 @@ const stream = SUPPRESS_LOGS
          singleLine: true,
          hideObject: false,
          colorize: true,
          colorizeObjects: true,
          levelFirst: false,
          ignore: 'hostname,pid',
          destination: logDestination,
          translateTime: 'HH:mm:ss',
          customPrettifiers: {
              time: (timestamp: string | object) => `[${timestamp}`,
              level: (logLevel: string | object, key: string, log: any, extras: any) => {
                  // Use labelColorized which preserves the colors
                  const { labelColorized } = extras;
                  const context = log.context || log.logger || 'app';
                  return `${labelColorized} ${context}]`;
              },
          },
          messageFormat: (log: any, messageKey: string) => {
              const msg = log[messageKey] || log.msg || '';
              return msg;
          },
      })
    : logDestination;

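Taken together, the time prettifier opens a bracket and the level prettifier closes it after appending the logger context, so a prettified line renders roughly as (illustrative): [14:05:32 INFO ConnectService] websocket connected
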
@@ -13,10 +13,11 @@ export const pubsub = new PubSub({ eventEmitter });

/**
 * Create a pubsub subscription.
 * @param channel The pubsub channel to subscribe to.
 * @param channel The pubsub channel to subscribe to. Can be either a predefined GRAPHQL_PUBSUB_CHANNEL
 * or a dynamic string for runtime-generated topics (e.g., log file paths like "LOG_FILE:/var/log/test.log")
 */
export const createSubscription = <T = any>(
    channel: GRAPHQL_PUBSUB_CHANNEL
    channel: GRAPHQL_PUBSUB_CHANNEL | string
): AsyncIterableIterator<T> => {
    return pubsub.asyncIterableIterator<T>(channel);
};

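A usage sketch of the widened signature (the INFO channel member, Info type, and log path are assumed for illustration):

// Predefined enum channel, as before:
const infoEvents = createSubscription<Info>(GRAPHQL_PUBSUB_CHANNEL.INFO);

// Runtime-generated topic, accepted now that the parameter is GRAPHQL_PUBSUB_CHANNEL | string:
const logLines = createSubscription<string>('LOG_FILE:/var/log/test.log');
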
@@ -16,11 +16,22 @@ export const getKeyFile = async function (appStore: RootState = store.getState()

    const keyFileName = basename(emhttp.var?.regFile);
    const registrationKeyFilePath = join(paths['keyfile-base'], keyFileName);
    const keyFile = await readFile(registrationKeyFilePath, 'binary');
    return Buffer.from(keyFile, 'binary')
        .toString('base64')
        .trim()
        .replace(/\+/g, '-')
        .replace(/\//g, '_')
        .replace(/=/g, '');

    try {
        const keyFile = await readFile(registrationKeyFilePath, 'binary');
        return Buffer.from(keyFile, 'binary')
            .toString('base64')
            .trim()
            .replace(/\+/g, '-')
            .replace(/\//g, '_')
            .replace(/=/g, '');
    } catch (error) {
        // Handle ENOENT error when Pro.key file doesn't exist
        if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
            // Return empty string when key file is missing (ENOKEYFILE state)
            return '';
        }
        // Re-throw other errors
        throw error;
    }
};

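Callers can now distinguish a missing key file from other failures; a hedged usage sketch:

const keyFile = await getKeyFile();
if (keyFile === '') {
    // ENOENT was handled above: no registration key present (ENOKEYFILE state).
} else {
    // keyFile holds the base64url-encoded key contents.
}
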
@@ -2,7 +2,6 @@
// Non-function exports from this module are loaded into the NestJS Config at runtime.

import { readFileSync } from 'node:fs';
import { homedir } from 'node:os';
import { join } from 'node:path';
import { fileURLToPath } from 'node:url';

@@ -99,7 +98,7 @@ export const MOTHERSHIP_GRAPHQL_LINK = process.env.MOTHERSHIP_GRAPHQL_LINK
    ? 'https://staging.mothership.unraid.net/ws'
    : 'https://mothership.unraid.net/ws';

export const PM2_HOME = process.env.PM2_HOME ?? join(homedir(), '.pm2');
export const PM2_HOME = process.env.PM2_HOME ?? '/var/log/.pm2';
export const PM2_PATH = join(import.meta.dirname, '../../', 'node_modules', 'pm2', 'bin', 'pm2');
export const ECOSYSTEM_PATH = join(import.meta.dirname, '../../', 'ecosystem.config.json');
export const PATHS_LOGS_DIR =
@@ -111,3 +110,6 @@ export const PATHS_CONFIG_MODULES =

export const PATHS_LOCAL_SESSION_FILE =
    process.env.PATHS_LOCAL_SESSION_FILE ?? '/var/run/unraid-api/local-session';

/** feature flag for the upcoming docker release */
export const ENABLE_NEXT_DOCKER_RELEASE = process.env.ENABLE_NEXT_DOCKER_RELEASE === 'true';

@@ -36,6 +36,7 @@ export type IniSlot = {
    size: string;
    sizeSb: string;
    slots: string;
    spundown: string;
    status: SlotStatus;
    temp: string;
    type: SlotType;
@@ -82,6 +83,7 @@ export const parse: StateFileToIniParserMap['disks'] = (disksIni) =>
        fsType: slot.fsType ?? null,
        format: slot.format === '-' ? null : slot.format,
        transport: slot.transport ?? null,
        isSpinning: slot.spundown ? slot.spundown === '0' : null,
    };
    // @TODO Zod Parse This
    return result;

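emhttp reports spindown state as a string flag, which the new parser line inverts into a nullable boolean; a minimal illustration:

// mirrors `isSpinning: slot.spundown ? slot.spundown === '0' : null` above
const toIsSpinning = (spundown?: string) => (spundown ? spundown === '0' : null);
toIsSpinning('0');       // true  -> disk is spinning
toIsSpinning('1');       // false -> disk is spun down
toIsSpinning(undefined); // null  -> state unknown
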
@@ -1,3 +1,4 @@
import { CacheModule } from '@nestjs/cache-manager';
import { Test } from '@nestjs/testing';

import { describe, expect, it } from 'vitest';
@@ -9,7 +10,7 @@ describe('Module Dependencies Integration', () => {
        let module;
        try {
            module = await Test.createTestingModule({
                imports: [RestModule],
                imports: [CacheModule.register({ isGlobal: true }), RestModule],
            }).compile();

            expect(module).toBeDefined();

@@ -14,6 +14,7 @@ import { AuthModule } from '@app/unraid-api/auth/auth.module.js';
import { AuthenticationGuard } from '@app/unraid-api/auth/authentication.guard.js';
import { LegacyConfigModule } from '@app/unraid-api/config/legacy-config.module.js';
import { CronModule } from '@app/unraid-api/cron/cron.module.js';
import { JobModule } from '@app/unraid-api/cron/job.module.js';
import { GraphModule } from '@app/unraid-api/graph/graph.module.js';
import { GlobalDepsModule } from '@app/unraid-api/plugin/global-deps.module.js';
import { RestModule } from '@app/unraid-api/rest/rest.module.js';
@@ -24,7 +25,7 @@ import { UnraidFileModifierModule } from '@app/unraid-api/unraid-file-modifier/u
        GlobalDepsModule,
        LegacyConfigModule,
        PubSubModule,
        ScheduleModule.forRoot(),
        JobModule,
        LoggerModule.forRoot({
            pinoHttp: {
                logger: apiLogger,
@@ -34,6 +35,15 @@ import { UnraidFileModifierModule } from '@app/unraid-api/unraid-file-modifier/u
                    req: () => undefined,
                    res: () => undefined,
                },
                formatters: {
                    log: (obj) => {
                        // Map NestJS context to Pino context field for pino-pretty
                        if (obj.context && !obj.logger) {
                            return { ...obj, logger: obj.context };
                        }
                        return obj;
                    },
                },
            },
        }),
        AuthModule,

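The added formatter only copies the NestJS context into the field pino-pretty reads; illustrative input/output (values made up):

// in:  { context: 'BootstrapService', msg: 'server started' }
// out: { context: 'BootstrapService', logger: 'BootstrapService', msg: 'server started' }
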
@@ -681,4 +681,104 @@ describe('ApiKeyService', () => {
            ]);
        });
    });

    describe('convertRolesStringArrayToRoles', () => {
        beforeEach(async () => {
            vi.mocked(getters.paths).mockReturnValue({
                'auth-keys': mockBasePath,
            } as ReturnType<typeof getters.paths>);

            // Create a fresh mock logger for each test
            mockLogger = {
                log: vi.fn(),
                error: vi.fn(),
                warn: vi.fn(),
                debug: vi.fn(),
                verbose: vi.fn(),
            };

            apiKeyService = new ApiKeyService();
            // Replace the logger with our mock
            (apiKeyService as any).logger = mockLogger;
        });

        it('should convert uppercase role strings to Role enum values', () => {
            const roles = ['ADMIN', 'CONNECT', 'VIEWER'];
            const result = apiKeyService.convertRolesStringArrayToRoles(roles);

            expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.VIEWER]);
        });

        it('should convert lowercase role strings to Role enum values', () => {
            const roles = ['admin', 'connect', 'guest'];
            const result = apiKeyService.convertRolesStringArrayToRoles(roles);

            expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.GUEST]);
        });

        it('should convert mixed case role strings to Role enum values', () => {
            const roles = ['Admin', 'CoNnEcT', 'ViEwEr'];
            const result = apiKeyService.convertRolesStringArrayToRoles(roles);

            expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.VIEWER]);
        });

        it('should handle roles with whitespace', () => {
            const roles = [' ADMIN ', ' CONNECT ', 'VIEWER '];
            const result = apiKeyService.convertRolesStringArrayToRoles(roles);

            expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.VIEWER]);
        });

        it('should filter out invalid roles and warn', () => {
            const roles = ['ADMIN', 'INVALID_ROLE', 'VIEWER', 'ANOTHER_INVALID'];
            const result = apiKeyService.convertRolesStringArrayToRoles(roles);

            expect(result).toEqual([Role.ADMIN, Role.VIEWER]);
            expect(mockLogger.warn).toHaveBeenCalledWith(
                'Ignoring invalid roles: INVALID_ROLE, ANOTHER_INVALID'
            );
        });

        it('should return empty array when all roles are invalid', () => {
            const roles = ['INVALID1', 'INVALID2', 'INVALID3'];
            const result = apiKeyService.convertRolesStringArrayToRoles(roles);

            expect(result).toEqual([]);
            expect(mockLogger.warn).toHaveBeenCalledWith(
                'Ignoring invalid roles: INVALID1, INVALID2, INVALID3'
            );
        });

        it('should return empty array for empty input', () => {
            const result = apiKeyService.convertRolesStringArrayToRoles([]);

            expect(result).toEqual([]);
            expect(mockLogger.warn).not.toHaveBeenCalled();
        });

        it('should handle all valid Role enum values', () => {
            const roles = Object.values(Role);
            const result = apiKeyService.convertRolesStringArrayToRoles(roles);

            expect(result).toEqual(Object.values(Role));
            expect(mockLogger.warn).not.toHaveBeenCalled();
        });

        it('should deduplicate roles', () => {
            const roles = ['ADMIN', 'admin', 'ADMIN', 'VIEWER', 'viewer'];
            const result = apiKeyService.convertRolesStringArrayToRoles(roles);

            // Note: Current implementation doesn't deduplicate, but this test documents the behavior
            expect(result).toEqual([Role.ADMIN, Role.ADMIN, Role.ADMIN, Role.VIEWER, Role.VIEWER]);
        });

        it('should handle mixed valid and invalid roles correctly', () => {
            const roles = ['ADMIN', 'invalid', 'CONNECT', 'bad_role', 'GUEST', 'VIEWER'];
            const result = apiKeyService.convertRolesStringArrayToRoles(roles);

            expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.GUEST, Role.VIEWER]);
            expect(mockLogger.warn).toHaveBeenCalledWith('Ignoring invalid roles: invalid, bad_role');
        });
    });
});

@@ -35,11 +35,29 @@ export class ApiKeyService implements OnModuleInit {

    async onModuleInit() {
        this.memoryApiKeys = await this.loadAllFromDisk();
        await this.cleanupLegacyInternalKeys();
        if (environment.IS_MAIN_PROCESS) {
            this.setupWatch();
        }
    }

    private async cleanupLegacyInternalKeys() {
        const legacyNames = ['CliInternal', 'ConnectInternal'];
        const keysToDelete = this.memoryApiKeys.filter((key) => legacyNames.includes(key.name));

        if (keysToDelete.length > 0) {
            try {
                await this.deleteApiKeys(keysToDelete.map((key) => key.id));
                this.logger.log(`Cleaned up ${keysToDelete.length} legacy internal keys`);
            } catch (error) {
                this.logger.debug(
                    error,
                    `Failed to delete legacy internal keys: ${keysToDelete.map((key) => key.name).join(', ')}`
                );
            }
        }
    }

    public async findAll(): Promise<ApiKey[]> {
        return this.memoryApiKeys;
    }
@@ -92,9 +110,25 @@ export class ApiKeyService implements OnModuleInit {
    }

    public convertRolesStringArrayToRoles(roles: string[]): Role[] {
        return roles
            .map((roleStr) => Role[roleStr.trim().toUpperCase() as keyof typeof Role])
            .filter(Boolean);
        const validRoles: Role[] = [];
        const invalidRoles: string[] = [];

        for (const roleStr of roles) {
            const upperRole = roleStr.trim().toUpperCase();
            const role = Role[upperRole as keyof typeof Role];

            if (role && ApiKeyService.validRoles.has(role)) {
                validRoles.push(role);
            } else {
                invalidRoles.push(roleStr);
            }
        }

        if (invalidRoles.length > 0) {
            this.logger.warn(`Ignoring invalid roles: ${invalidRoles.join(', ')}`);
        }

        return validRoles;
    }

    async create({

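A usage sketch of the rewritten conversion, matching the behavior pinned by the tests above:

// illustrative inputs; Role values per @unraid/shared
service.convertRolesStringArrayToRoles([' admin ', 'BOGUS', 'viewer']);
// -> [Role.ADMIN, Role.VIEWER], plus a warn log: "Ignoring invalid roles: BOGUS"
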
192
api/src/unraid-api/cli/__test__/api-key.command.test.ts
Normal file
@@ -0,0 +1,192 @@
import { Test, TestingModule } from '@nestjs/testing';

import { InquirerService } from 'nest-commander';
import { beforeEach, describe, expect, it, vi } from 'vitest';

import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
import { AddApiKeyQuestionSet } from '@app/unraid-api/cli/apikey/add-api-key.questions.js';
import { ApiKeyCommand } from '@app/unraid-api/cli/apikey/api-key.command.js';
import { LogService } from '@app/unraid-api/cli/log.service.js';

describe('ApiKeyCommand', () => {
    let command: ApiKeyCommand;
    let apiKeyService: ApiKeyService;
    let logService: LogService;
    let inquirerService: InquirerService;
    let questionSet: AddApiKeyQuestionSet;

    beforeEach(async () => {
        const module: TestingModule = await Test.createTestingModule({
            providers: [
                ApiKeyCommand,
                AddApiKeyQuestionSet,
                {
                    provide: ApiKeyService,
                    useValue: {
                        findByField: vi.fn(),
                        create: vi.fn(),
                        findAll: vi.fn(),
                        deleteApiKeys: vi.fn(),
                        convertRolesStringArrayToRoles: vi.fn((roles) => roles),
                        convertPermissionsStringArrayToPermissions: vi.fn((perms) => perms),
                        getAllValidPermissions: vi.fn(() => []),
                    },
                },
                {
                    provide: LogService,
                    useValue: {
                        log: vi.fn(),
                        error: vi.fn(),
                    },
                },
                {
                    provide: InquirerService,
                    useValue: {
                        prompt: vi.fn(),
                    },
                },
            ],
        }).compile();

        command = module.get<ApiKeyCommand>(ApiKeyCommand);
        apiKeyService = module.get<ApiKeyService>(ApiKeyService);
        logService = module.get<LogService>(LogService);
        inquirerService = module.get<InquirerService>(InquirerService);
        questionSet = module.get<AddApiKeyQuestionSet>(AddApiKeyQuestionSet);
    });

    describe('AddApiKeyQuestionSet', () => {
        describe('shouldAskOverwrite', () => {
            it('should return true when an API key with the given name exists', () => {
                vi.mocked(apiKeyService.findByField).mockReturnValue({
                    key: 'existing-key',
                    name: 'test-key',
                    description: 'Test key',
                    roles: [],
                    permissions: [],
                } as any);

                const result = questionSet.shouldAskOverwrite({ name: 'test-key' });

                expect(result).toBe(true);
                expect(apiKeyService.findByField).toHaveBeenCalledWith('name', 'test-key');
            });

            it('should return false when no API key with the given name exists', () => {
                vi.mocked(apiKeyService.findByField).mockReturnValue(null);

                const result = questionSet.shouldAskOverwrite({ name: 'non-existent-key' });

                expect(result).toBe(false);
                expect(apiKeyService.findByField).toHaveBeenCalledWith('name', 'non-existent-key');
            });
        });
    });

    describe('run', () => {
        it('should find and return existing key when not creating', async () => {
            const mockKey = { key: 'test-api-key-123', name: 'test-key' };
            vi.mocked(apiKeyService.findByField).mockReturnValue(mockKey as any);

            await command.run([], { name: 'test-key', create: false });

            expect(apiKeyService.findByField).toHaveBeenCalledWith('name', 'test-key');
            expect(logService.log).toHaveBeenCalledWith('test-api-key-123');
        });

        it('should create new key when key does not exist and create flag is set', async () => {
            vi.mocked(apiKeyService.findByField).mockReturnValue(null);
            vi.mocked(apiKeyService.create).mockResolvedValue({ key: 'new-api-key-456' } as any);

            await command.run([], {
                name: 'new-key',
                create: true,
                roles: ['ADMIN'] as any,
                description: 'Test description',
            });

            expect(apiKeyService.create).toHaveBeenCalledWith({
                name: 'new-key',
                description: 'Test description',
                roles: ['ADMIN'],
                permissions: undefined,
                overwrite: false,
            });
            expect(logService.log).toHaveBeenCalledWith('new-api-key-456');
        });

        it('should error when key exists and overwrite is not set in non-interactive mode', async () => {
            const mockKey = { key: 'existing-key', name: 'test-key' };
            vi.mocked(apiKeyService.findByField)
                .mockReturnValueOnce(null) // First call in line 131
                .mockReturnValueOnce(mockKey as any); // Second call in non-interactive check
            const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => {
                throw new Error('process.exit');
            });

            await expect(
                command.run([], {
                    name: 'test-key',
                    create: true,
                    roles: ['ADMIN'] as any,
                })
            ).rejects.toThrow();

            expect(logService.error).toHaveBeenCalledWith(
                "API key with name 'test-key' already exists. Use --overwrite to replace it."
            );
            expect(exitSpy).toHaveBeenCalledWith(1);
            exitSpy.mockRestore();
        });

        it('should create key with overwrite when key exists and overwrite is set', async () => {
            const mockKey = { key: 'existing-key', name: 'test-key' };
            vi.mocked(apiKeyService.findByField)
                .mockReturnValueOnce(null) // First call in line 131
                .mockReturnValueOnce(mockKey as any); // Second call in non-interactive check
            vi.mocked(apiKeyService.create).mockResolvedValue({ key: 'overwritten-key' } as any);

            await command.run([], {
                name: 'test-key',
                create: true,
                roles: ['ADMIN'] as any,
                overwrite: true,
            });

            expect(apiKeyService.create).toHaveBeenCalledWith({
                name: 'test-key',
                description: 'CLI generated key: test-key',
                roles: ['ADMIN'],
                permissions: undefined,
                overwrite: true,
            });
            expect(logService.log).toHaveBeenCalledWith('overwritten-key');
        });

        it('should prompt for missing fields when creating without sufficient info', async () => {
            vi.mocked(apiKeyService.findByField).mockReturnValue(null);
            vi.mocked(inquirerService.prompt).mockResolvedValue({
                name: 'prompted-key',
                roles: ['USER'],
                permissions: [],
                description: 'Prompted description',
                overwrite: false,
            } as any);
            vi.mocked(apiKeyService.create).mockResolvedValue({ key: 'prompted-api-key' } as any);

            await command.run([], { name: '', create: true });

            expect(inquirerService.prompt).toHaveBeenCalledWith('add-api-key', {
                name: '',
                create: true,
            });
            expect(apiKeyService.create).toHaveBeenCalledWith({
                name: 'prompted-key',
                description: 'Prompted description',
                roles: ['USER'],
                permissions: [],
                overwrite: false,
            });
        });
    });
});

111
api/src/unraid-api/cli/__test__/version.command.test.ts
Normal file
@@ -0,0 +1,111 @@
import { Test, TestingModule } from '@nestjs/testing';

import { afterEach, beforeEach, describe, expect, it, MockInstance, vi } from 'vitest';

import { LogService } from '@app/unraid-api/cli/log.service.js';
import { VersionCommand } from '@app/unraid-api/cli/version.command.js';

let API_VERSION_MOCK = '4.18.2+build123';

vi.mock('@app/environment.js', async (importOriginal) => {
    const actual = (await importOriginal()) as any;
    return {
        ...actual,
        get API_VERSION() {
            return API_VERSION_MOCK;
        },
    };
});

describe('VersionCommand', () => {
    let command: VersionCommand;
    let logService: LogService;
    let consoleLogSpy: MockInstance<typeof console.log>;

    beforeEach(async () => {
        API_VERSION_MOCK = '4.18.2+build123'; // Reset to default before each test
        consoleLogSpy = vi.spyOn(console, 'log').mockImplementation(() => {});

        const module: TestingModule = await Test.createTestingModule({
            providers: [
                VersionCommand,
                {
                    provide: LogService,
                    useValue: {
                        info: vi.fn(),
                    },
                },
            ],
        }).compile();

        command = module.get<VersionCommand>(VersionCommand);
        logService = module.get<LogService>(LogService);
    });

    afterEach(() => {
        vi.restoreAllMocks();
    });

    describe('run', () => {
        it('should output version with logger when no options provided', async () => {
            await command.run([]);

            expect(logService.info).toHaveBeenCalledWith('Unraid API v4.18.2+build123');
            expect(consoleLogSpy).not.toHaveBeenCalled();
        });

        it('should output version with logger when json option is false', async () => {
            await command.run([], { json: false });

            expect(logService.info).toHaveBeenCalledWith('Unraid API v4.18.2+build123');
            expect(consoleLogSpy).not.toHaveBeenCalled();
        });

        it('should output JSON when json option is true', async () => {
            await command.run([], { json: true });

            expect(logService.info).not.toHaveBeenCalled();
            expect(consoleLogSpy).toHaveBeenCalledWith(
                JSON.stringify({
                    version: '4.18.2',
                    build: 'build123',
                    combined: '4.18.2+build123',
                })
            );
        });

        it('should handle version without build info', async () => {
            API_VERSION_MOCK = '4.18.2'; // Set version without build info

            const module: TestingModule = await Test.createTestingModule({
                providers: [
                    VersionCommand,
                    {
                        provide: LogService,
                        useValue: {
                            info: vi.fn(),
                        },
                    },
                ],
            }).compile();

            const commandWithoutBuild = module.get<VersionCommand>(VersionCommand);

            await commandWithoutBuild.run([], { json: true });

            expect(consoleLogSpy).toHaveBeenCalledWith(
                JSON.stringify({
                    version: '4.18.2',
                    build: undefined,
                    combined: '4.18.2',
                })
            );
        });
    });

    describe('parseJson', () => {
        it('should return true', () => {
            expect(command.parseJson()).toBe(true);
        });
    });
});

@@ -39,6 +39,12 @@ export class AddApiKeyQuestionSet {
        return this.apiKeyService.convertRolesStringArrayToRoles(val);
    }

    @WhenFor({ name: 'roles' })
    shouldAskRoles(options: { roles?: Role[]; permissions?: Permission[] }): boolean {
        // Ask for roles if they weren't provided or are empty
        return !options.roles || options.roles.length === 0;
    }

    @ChoicesFor({ name: 'roles' })
    async getRoles() {
        return Object.values(Role);
@@ -53,6 +59,12 @@ export class AddApiKeyQuestionSet {
        return this.apiKeyService.convertPermissionsStringArrayToPermissions(val);
    }

    @WhenFor({ name: 'permissions' })
    shouldAskPermissions(options: { roles?: Role[]; permissions?: Permission[] }): boolean {
        // Ask for permissions if they weren't provided or are empty
        return !options.permissions || options.permissions.length === 0;
    }

    @ChoicesFor({ name: 'permissions' })
    async getPermissions() {
        return this.apiKeyService
@@ -72,6 +84,6 @@ export class AddApiKeyQuestionSet {

    @WhenFor({ name: 'overwrite' })
    shouldAskOverwrite(options: { name: string }): boolean {
        return Boolean(this.apiKeyService.findByKey(options.name));
        return Boolean(this.apiKeyService.findByField('name', options.name));
    }
}

434
api/src/unraid-api/cli/apikey/api-key.command.spec.ts
Normal file
@@ -0,0 +1,434 @@
import { Test, TestingModule } from '@nestjs/testing';

import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
import { InquirerService } from 'nest-commander';
import { beforeEach, describe, expect, it, vi } from 'vitest';

import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
import { ApiKeyCommand } from '@app/unraid-api/cli/apikey/api-key.command.js';
import { LogService } from '@app/unraid-api/cli/log.service.js';

describe('ApiKeyCommand', () => {
    let command: ApiKeyCommand;
    let apiKeyService: ApiKeyService;
    let logService: LogService;

    beforeEach(async () => {
        const module: TestingModule = await Test.createTestingModule({
            providers: [
                ApiKeyCommand,
                {
                    provide: ApiKeyService,
                    useValue: {
                        findByField: vi.fn(),
                        create: vi.fn(),
                        convertRolesStringArrayToRoles: vi.fn(),
                        convertPermissionsStringArrayToPermissions: vi.fn(),
                        findAll: vi.fn(),
                        deleteApiKeys: vi.fn(),
                    },
                },
                {
                    provide: LogService,
                    useValue: {
                        log: vi.fn(),
                        error: vi.fn(),
                        warn: vi.fn(),
                    },
                },
                {
                    provide: InquirerService,
                    useValue: {
                        prompt: vi.fn(),
                    },
                },
            ],
        }).compile();

        command = module.get<ApiKeyCommand>(ApiKeyCommand);
        apiKeyService = module.get<ApiKeyService>(ApiKeyService);
        logService = module.get<LogService>(LogService);
    });

    describe('parseRoles', () => {
        it('should parse valid roles correctly', () => {
            const mockConvert = vi
                .spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
                .mockReturnValue([Role.ADMIN, Role.CONNECT]);

            const result = command.parseRoles('ADMIN,CONNECT');

            expect(mockConvert).toHaveBeenCalledWith(['ADMIN', 'CONNECT']);
            expect(result).toEqual([Role.ADMIN, Role.CONNECT]);
        });

        it('should return GUEST role when no roles provided', () => {
            const result = command.parseRoles('');

            expect(result).toEqual([Role.GUEST]);
        });

        it('should handle roles with spaces', () => {
            const mockConvert = vi
                .spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
                .mockReturnValue([Role.ADMIN, Role.VIEWER]);

            const result = command.parseRoles('ADMIN, VIEWER');

            expect(mockConvert).toHaveBeenCalledWith(['ADMIN', ' VIEWER']);
            expect(result).toEqual([Role.ADMIN, Role.VIEWER]);
        });

        it('should throw error when no valid roles found', () => {
            vi.spyOn(apiKeyService, 'convertRolesStringArrayToRoles').mockReturnValue([]);

            expect(() => command.parseRoles('INVALID_ROLE')).toThrow(
                `Invalid roles. Valid options are: ${Object.values(Role).join(', ')}`
            );
        });

        it('should handle mixed valid and invalid roles with warning', () => {
            const mockConvert = vi
                .spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
                .mockImplementation((roles) => {
                    const validRoles: Role[] = [];
                    const invalidRoles: string[] = [];

                    for (const roleStr of roles) {
                        const upperRole = roleStr.trim().toUpperCase();
                        const role = Role[upperRole as keyof typeof Role];

                        if (role) {
                            validRoles.push(role);
                        } else {
                            invalidRoles.push(roleStr);
                        }
                    }

                    if (invalidRoles.length > 0) {
                        logService.warn(`Ignoring invalid roles: ${invalidRoles.join(', ')}`);
                    }

                    return validRoles;
                });

            const result = command.parseRoles('ADMIN,INVALID,VIEWER');

            expect(mockConvert).toHaveBeenCalledWith(['ADMIN', 'INVALID', 'VIEWER']);
            expect(logService.warn).toHaveBeenCalledWith('Ignoring invalid roles: INVALID');
            expect(result).toEqual([Role.ADMIN, Role.VIEWER]);
        });
    });

    describe('run', () => {
        it('should create API key with roles without prompting', async () => {
            const mockKey = {
                id: 'test-id',
                key: 'test-key-123',
                name: 'TEST',
                roles: [Role.ADMIN],
                createdAt: new Date().toISOString(),
                permissions: [],
            };
            vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
            vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);

            await command.run([], {
                name: 'TEST',
                create: true,
                roles: [Role.ADMIN],
                permissions: undefined,
                description: 'Test description',
            });

            expect(apiKeyService.create).toHaveBeenCalledWith({
                name: 'TEST',
                description: 'Test description',
                roles: [Role.ADMIN],
                permissions: undefined,
                overwrite: false,
            });
            expect(logService.log).toHaveBeenCalledWith('test-key-123');
        });

        it('should create API key with permissions only without prompting', async () => {
            const mockKey = {
                id: 'test-id',
                key: 'test-key-456',
                name: 'TEST_PERMS',
                roles: [],
                createdAt: new Date().toISOString(),
                permissions: [],
            };
            const mockPermissions = [
                {
                    resource: Resource.DOCKER,
                    actions: [AuthAction.READ_ANY],
                },
            ];

            vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
            vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);

            await command.run([], {
                name: 'TEST_PERMS',
                create: true,
                roles: undefined,
                permissions: mockPermissions,
                description: 'Test with permissions',
            });

            expect(apiKeyService.create).toHaveBeenCalledWith({
                name: 'TEST_PERMS',
                description: 'Test with permissions',
                roles: undefined,
                permissions: mockPermissions,
                overwrite: false,
            });
            expect(logService.log).toHaveBeenCalledWith('test-key-456');
        });

        it('should use default description when not provided', async () => {
            const mockKey = {
                id: 'test-id',
                key: 'test-key-789',
                name: 'NO_DESC',
                roles: [Role.VIEWER],
                createdAt: new Date().toISOString(),
                permissions: [],
            };
            vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
            vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);

            await command.run([], {
                name: 'NO_DESC',
                create: true,
                roles: [Role.VIEWER],
                permissions: undefined,
            });

            expect(apiKeyService.create).toHaveBeenCalledWith({
                name: 'NO_DESC',
                description: 'CLI generated key: NO_DESC',
                roles: [Role.VIEWER],
                permissions: undefined,
                overwrite: false,
            });
        });

        it('should return existing key when found', async () => {
            const existingKey = {
                id: 'existing-id',
                key: 'existing-key-123',
                name: 'EXISTING',
                roles: [Role.ADMIN],
                createdAt: new Date().toISOString(),
                permissions: [],
            };
            vi.spyOn(apiKeyService, 'findByField').mockReturnValue(existingKey);

            await command.run([], {
                name: 'EXISTING',
                create: false,
            });

            expect(apiKeyService.findByField).toHaveBeenCalledWith('name', 'EXISTING');
            expect(logService.log).toHaveBeenCalledWith('existing-key-123');
            expect(apiKeyService.create).not.toHaveBeenCalled();
        });

        it('should handle uppercase role conversion', () => {
            const mockConvert = vi
                .spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
                .mockImplementation((roles) => {
                    return roles
                        .map((roleStr) => Role[roleStr.trim().toUpperCase() as keyof typeof Role])
                        .filter(Boolean);
                });

            const result = command.parseRoles('admin,connect');

            expect(mockConvert).toHaveBeenCalledWith(['admin', 'connect']);
            expect(result).toEqual([Role.ADMIN, Role.CONNECT]);
        });

        it('should handle lowercase role conversion', () => {
            const mockConvert = vi
                .spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
                .mockImplementation((roles) => {
                    return roles
                        .map((roleStr) => Role[roleStr.trim().toUpperCase() as keyof typeof Role])
                        .filter(Boolean);
                });

            const result = command.parseRoles('viewer');

            expect(mockConvert).toHaveBeenCalledWith(['viewer']);
            expect(result).toEqual([Role.VIEWER]);
        });

        it('should handle mixed case role conversion', () => {
            const mockConvert = vi
                .spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
                .mockImplementation((roles) => {
                    return roles
                        .map((roleStr) => Role[roleStr.trim().toUpperCase() as keyof typeof Role])
                        .filter(Boolean);
                });

            const result = command.parseRoles('Admin,CoNnEcT');

            expect(mockConvert).toHaveBeenCalledWith(['Admin', 'CoNnEcT']);
            expect(result).toEqual([Role.ADMIN, Role.CONNECT]);
        });
    });

    describe('JSON output functionality', () => {
        let consoleSpy: ReturnType<typeof vi.spyOn>;

        beforeEach(() => {
            consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
        });

        it('should output JSON when creating key with --json flag', async () => {
            const mockKey = {
                id: 'test-id-123',
                key: 'test-key-456',
                name: 'JSON_TEST',
                roles: [Role.ADMIN],
                createdAt: new Date().toISOString(),
                permissions: [],
            };
            vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
            vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);

            await command.run([], {
                name: 'JSON_TEST',
                create: true,
                roles: [Role.ADMIN],
                json: true,
            });

            expect(consoleSpy).toHaveBeenCalledWith(
                JSON.stringify({ key: 'test-key-456', name: 'JSON_TEST', id: 'test-id-123' })
            );
            expect(logService.log).not.toHaveBeenCalledWith('test-key-456');
        });

        it('should output JSON when fetching existing key with --json flag', async () => {
            const existingKey = {
                id: 'existing-id-456',
                key: 'existing-key-789',
                name: 'EXISTING_JSON',
                roles: [Role.VIEWER],
                createdAt: new Date().toISOString(),
                permissions: [],
            };
            vi.spyOn(apiKeyService, 'findByField').mockReturnValue(existingKey);

            await command.run([], {
                name: 'EXISTING_JSON',
                create: false,
                json: true,
            });

            expect(consoleSpy).toHaveBeenCalledWith(
                JSON.stringify({ key: 'existing-key-789', name: 'EXISTING_JSON', id: 'existing-id-456' })
            );
            expect(logService.log).not.toHaveBeenCalledWith('existing-key-789');
        });

        it('should output JSON when deleting key with --json flag', async () => {
            const existingKeys = [
                {
                    id: 'delete-id-123',
                    name: 'DELETE_JSON',
                    key: 'delete-key-456',
                    roles: [Role.GUEST],
                    createdAt: new Date().toISOString(),
                    permissions: [],
                },
            ];
            vi.spyOn(apiKeyService, 'findAll').mockResolvedValue(existingKeys);
            vi.spyOn(apiKeyService, 'deleteApiKeys').mockResolvedValue();

            await command.run([], {
                name: 'DELETE_JSON',
                delete: true,
                json: true,
            });

            expect(consoleSpy).toHaveBeenCalledWith(
                JSON.stringify({
                    deleted: 1,
                    keys: [{ id: 'delete-id-123', name: 'DELETE_JSON' }],
                })
            );
            expect(logService.log).not.toHaveBeenCalledWith('Successfully deleted 1 API key');
|
||||
});
|
||||
|
||||
it('should output JSON error when deleting non-existent key with --json flag', async () => {
|
||||
vi.spyOn(apiKeyService, 'findAll').mockResolvedValue([]);
|
||||
|
||||
await command.run([], {
|
||||
name: 'NONEXISTENT',
|
||||
delete: true,
|
||||
json: true,
|
||||
});
|
||||
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
JSON.stringify({ deleted: 0, message: 'No API keys found to delete' })
|
||||
);
|
||||
expect(logService.log).not.toHaveBeenCalledWith('No API keys found to delete');
|
||||
});
|
||||
|
||||
it('should not suppress creation message when not using JSON', async () => {
|
||||
const mockKey = {
|
||||
id: 'test-id',
|
||||
key: 'test-key',
|
||||
name: 'NO_JSON_TEST',
|
||||
roles: [Role.ADMIN],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
|
||||
vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'NO_JSON_TEST',
|
||||
create: true,
|
||||
roles: [Role.ADMIN],
|
||||
json: false,
|
||||
});
|
||||
|
||||
expect(logService.log).toHaveBeenCalledWith('Creating API Key...');
|
||||
expect(logService.log).toHaveBeenCalledWith('test-key');
|
||||
expect(consoleSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should suppress creation message when using JSON', async () => {
|
||||
const mockKey = {
|
||||
id: 'test-id',
|
||||
key: 'test-key',
|
||||
name: 'JSON_SUPPRESS_TEST',
|
||||
roles: [Role.ADMIN],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
|
||||
vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'JSON_SUPPRESS_TEST',
|
||||
create: true,
|
||||
roles: [Role.ADMIN],
|
||||
json: true,
|
||||
});
|
||||
|
||||
expect(logService.log).not.toHaveBeenCalledWith('Creating API Key...');
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
JSON.stringify({ key: 'test-key', name: 'JSON_SUPPRESS_TEST', id: 'test-id' })
|
||||
);
|
||||
});
|
||||
});
|
||||
});
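The JSON tests above pin down the machine-readable contracts for key creation ({ key, name, id }) and deletion ({ deleted, keys } or { deleted, message }). A minimal sketch of the parsed shape a downstream script could rely on, derived from what the tests assert (the interface name is illustrative, not part of this changeset):

```ts
// Shape asserted by the deletion tests above.
interface DeleteResult {
    deleted: number;
    keys?: { id: string; name: string }[];
    message?: string;
    error?: string;
}
```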
@@ -10,11 +10,13 @@ import { Permission } from '@app/unraid-api/graph/resolvers/api-key/api-key.mode

interface KeyOptions {
    name: string;
    create: boolean;
    create?: boolean;
    delete?: boolean;
    description?: string;
    roles?: Role[];
    permissions?: Permission[];
    overwrite?: boolean;
    json?: boolean;
}

@Command({
@@ -52,22 +54,15 @@ export class ApiKeyCommand extends CommandRunner {
    })
    parseRoles(roles: string): Role[] {
        if (!roles) return [Role.GUEST];
        const validRoles: Set<Role> = new Set(Object.values(Role));

        const requestedRoles = roles.split(',').map((role) => role.trim().toLocaleLowerCase() as Role);
        const validRequestedRoles = requestedRoles.filter((role) => validRoles.has(role));
        const roleArray = roles.split(',').filter(Boolean);
        const validRoles = this.apiKeyService.convertRolesStringArrayToRoles(roleArray);

        if (validRequestedRoles.length === 0) {
            throw new Error(`Invalid roles. Valid options are: ${Array.from(validRoles).join(', ')}`);
        if (validRoles.length === 0) {
            throw new Error(`Invalid roles. Valid options are: ${Object.values(Role).join(', ')}`);
        }

        const invalidRoles = requestedRoles.filter((role) => !validRoles.has(role));

        if (invalidRoles.length > 0) {
            this.logger.warn(`Ignoring invalid roles: ${invalidRoles.join(', ')}`);
        }

        return validRequestedRoles;
        return validRoles;
    }

    @Option({
@@ -98,48 +93,137 @@ ACTIONS: ${Object.values(AuthAction).join(', ')}`,
        return true;
    }

    /** Prompt the user to select API keys to delete. Then, delete the selected keys. */
    private async deleteKeys() {
    @Option({
        flags: '--overwrite',
        description: 'Overwrite existing API key if it exists',
    })
    parseOverwrite(): boolean {
        return true;
    }

    @Option({
        flags: '--json',
        description: 'Output machine-readable JSON format',
    })
    parseJson(): boolean {
        return true;
    }

    /** Helper to output either JSON or regular log message */
    private output(message: string, jsonData?: object, jsonOutput?: boolean): void {
        if (jsonOutput && jsonData) {
            console.log(JSON.stringify(jsonData));
        } else {
            this.logger.log(message);
        }
    }

    /** Helper to output either JSON or regular error message */
    private outputError(message: string, jsonData?: object, jsonOutput?: boolean): void {
        if (jsonOutput && jsonData) {
            console.log(JSON.stringify(jsonData));
        } else {
            this.logger.error(message);
        }
    }

    /** Delete API keys either by name (non-interactive) or by prompting user selection (interactive). */
    private async deleteKeys(name?: string, jsonOutput?: boolean) {
        const allKeys = await this.apiKeyService.findAll();
        if (allKeys.length === 0) {
            this.logger.log('No API keys found to delete');
            this.output(
                'No API keys found to delete',
                { deleted: 0, message: 'No API keys found to delete' },
                jsonOutput
            );
            return;
        }

        const answers = await this.inquirerService.prompt<DeleteApiKeyAnswers>(
            DeleteApiKeyQuestionSet.name,
            {}
        );
        if (!answers.selectedKeys || answers.selectedKeys.length === 0) {
            this.logger.log('No keys selected for deletion');
            return;
        let selectedKeyIds: string[];
        let deletedKeys: { id: string; name: string }[] = [];

        if (name) {
            // Non-interactive mode: delete by name
            const keyToDelete = allKeys.find((key) => key.name === name);
            if (!keyToDelete) {
                this.outputError(
                    `No API key found with name: ${name}`,
                    { deleted: 0, error: `No API key found with name: ${name}` },
                    jsonOutput
                );
                process.exit(1);
            }
            selectedKeyIds = [keyToDelete.id];
            deletedKeys = [{ id: keyToDelete.id, name: keyToDelete.name }];
        } else {
            // Interactive mode: prompt user to select keys
            const answers = await this.inquirerService.prompt<DeleteApiKeyAnswers>(
                DeleteApiKeyQuestionSet.name,
                {}
            );
            if (!answers.selectedKeys || answers.selectedKeys.length === 0) {
                this.output(
                    'No keys selected for deletion',
                    { deleted: 0, message: 'No keys selected for deletion' },
                    jsonOutput
                );
                return;
            }
            selectedKeyIds = answers.selectedKeys;
            deletedKeys = allKeys
                .filter((key) => selectedKeyIds.includes(key.id))
                .map((key) => ({ id: key.id, name: key.name }));
        }

        try {
            await this.apiKeyService.deleteApiKeys(answers.selectedKeys);
            this.logger.log(`Successfully deleted ${answers.selectedKeys.length} API keys`);
            await this.apiKeyService.deleteApiKeys(selectedKeyIds);
            const message = `Successfully deleted ${selectedKeyIds.length} API key${selectedKeyIds.length === 1 ? '' : 's'}`;
            this.output(message, { deleted: selectedKeyIds.length, keys: deletedKeys }, jsonOutput);
        } catch (error) {
            this.logger.error(error as any);
            const errorMessage = error instanceof Error ? error.message : String(error);
            this.outputError(errorMessage, { deleted: 0, error: errorMessage }, jsonOutput);
            process.exit(1);
        }
    }

    async run(
        _: string[],
        options: KeyOptions = { create: false, name: '', delete: false }
    ): Promise<void> {
    async run(_: string[], options: KeyOptions = { name: '', delete: false }): Promise<void> {
        try {
            if (options.delete) {
                await this.deleteKeys();
                await this.deleteKeys(options.name, options.json);
                return;
            }

            const key = this.apiKeyService.findByField('name', options.name);
            if (key) {
                this.logger.log(key.key);
            } else if (options.create) {
                options = await this.inquirerService.prompt(AddApiKeyQuestionSet.name, options);
                this.logger.log('Creating API Key...' + JSON.stringify(options));
                this.output(key.key, { key: key.key, name: key.name, id: key.id }, options.json);
            } else if (options.create === true) {
                // Check if we have minimum required info from flags (name + at least one role or permission)
                const hasMinimumInfo =
                    options.name &&
                    ((options.roles && options.roles.length > 0) ||
                        (options.permissions && options.permissions.length > 0));

                if (!hasMinimumInfo) {
                    // Interactive mode - prompt for missing fields
                    options = await this.inquirerService.prompt(AddApiKeyQuestionSet.name, options);
                } else {
                    // Non-interactive mode - check if key exists and handle overwrite
                    const existingKey = this.apiKeyService.findByField('name', options.name);
                    if (existingKey && !options.overwrite) {
                        this.outputError(
                            `API key with name '${options.name}' already exists. Use --overwrite to replace it.`,
                            {
                                error: `API key with name '${options.name}' already exists. Use --overwrite to replace it.`,
                            },
                            options.json
                        );
                        process.exit(1);
                    }
                }

                if (!options.json) {
                    this.logger.log('Creating API Key...');
                }

                if (!options.roles && !options.permissions) {
                    this.logger.error('Please add at least one role or permission to the key.');
@@ -154,10 +238,10 @@ ACTIONS: ${Object.values(AuthAction).join(', ')}`,
                    description: options.description || `CLI generated key: ${options.name}`,
                    roles: options.roles,
                    permissions: options.permissions,
                    overwrite: true,
                    overwrite: options.overwrite ?? false,
                });

                this.logger.log(key.key);
                this.output(key.key, { key: key.key, name: key.name, id: key.id }, options.json);
            } else {
                this.logger.log('No Key Found');
                process.exit(1);
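Taken together, the new flags make key management scriptable end to end. A sketch of a consumer of the machine-readable output; the `apikey` subcommand name and the `unraid-api` binary name are assumptions for illustration, not taken from this diff:

```ts
import { execa } from 'execa';

// Hypothetical consumer: create (or fetch) a key non-interactively and parse the JSON payload.
async function getCiApiKey(): Promise<string> {
    const { stdout } = await execa('unraid-api', [
        'apikey',
        '--name',
        'CI',
        '--create',
        '--roles',
        'admin',
        '--json',
    ]);
    // Shape matches what output() prints above: { key, name, id }.
    const parsed = JSON.parse(stdout) as { key: string; name: string; id: string };
    return parsed.key;
}
```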
@@ -241,6 +241,8 @@ export type ArrayDisk = Node & {
    id: Scalars['PrefixedID']['output'];
    /** Array slot number. Parity1 is always 0 and Parity2 is always 29. Array slots will be 1 - 28. Cache slots are 30 - 53. Flash is 54. */
    idx: Scalars['Int']['output'];
    /** Whether the disk is currently spinning */
    isSpinning?: Maybe<Scalars['Boolean']['output']>;
    name?: Maybe<Scalars['String']['output']>;
    /** Number of unrecoverable errors reported by the device I/O drivers. Missing data due to unrecoverable array read errors is filled in on-the-fly using parity reconstruct (and we attempt to write this data back to the sector(s) which failed). Any unrecoverable write error results in disabling the disk. */
    numErrors?: Maybe<Scalars['BigInt']['output']>;
@@ -539,12 +541,16 @@ export type CoreVersions = {
/** CPU load for a single core */
export type CpuLoad = {
    __typename?: 'CpuLoad';
    /** The percentage of time the CPU spent running virtual machines (guest). */
    percentGuest: Scalars['Float']['output'];
    /** The percentage of time the CPU was idle. */
    percentIdle: Scalars['Float']['output'];
    /** The percentage of time the CPU spent servicing hardware interrupts. */
    percentIrq: Scalars['Float']['output'];
    /** The percentage of time the CPU spent on low-priority (niced) user space processes. */
    percentNice: Scalars['Float']['output'];
    /** The percentage of CPU time stolen by the hypervisor. */
    percentSteal: Scalars['Float']['output'];
    /** The percentage of time the CPU spent in kernel space. */
    percentSystem: Scalars['Float']['output'];
    /** The total CPU load on a single core, in percent. */
@@ -603,6 +609,8 @@ export type Disk = Node & {
    id: Scalars['PrefixedID']['output'];
    /** The interface type of the disk */
    interfaceType: DiskInterfaceType;
    /** Whether the disk is spinning or not */
    isSpinning: Scalars['Boolean']['output'];
    /** The model name of the disk */
    name: Scalars['String']['output'];
    /** The partitions on the disk */
@@ -670,6 +678,7 @@ export enum DiskSmartStatus {

export type Docker = Node & {
    __typename?: 'Docker';
    containerUpdateStatuses: Array<ExplicitStatusItem>;
    containers: Array<DockerContainer>;
    id: Scalars['PrefixedID']['output'];
    networks: Array<DockerNetwork>;
@@ -695,6 +704,8 @@ export type DockerContainer = Node & {
    id: Scalars['PrefixedID']['output'];
    image: Scalars['String']['output'];
    imageId: Scalars['String']['output'];
    isRebuildReady?: Maybe<Scalars['Boolean']['output']>;
    isUpdateAvailable?: Maybe<Scalars['Boolean']['output']>;
    labels?: Maybe<Scalars['JSON']['output']>;
    mounts?: Maybe<Array<Scalars['JSON']['output']>>;
    names: Array<Scalars['String']['output']>;
@@ -766,6 +777,12 @@ export type EnableDynamicRemoteAccessInput = {
    url: AccessUrlInput;
};

export type ExplicitStatusItem = {
    __typename?: 'ExplicitStatusItem';
    name: Scalars['String']['output'];
    updateStatus: UpdateStatus;
};

export type Flash = Node & {
    __typename?: 'Flash';
    guid: Scalars['String']['output'];
@@ -1221,6 +1238,7 @@ export type Mutation = {
    rclone: RCloneMutations;
    /** Reads each notification to recompute & update the overview. */
    recalculateOverview: NotificationOverview;
    refreshDockerDigests: Scalars['Boolean']['output'];
    /** Remove one or more plugins from the API. Returns false if restart was triggered automatically, true if manual restart is required. */
    removePlugin: Scalars['Boolean']['output'];
    setDockerFolderChildren: ResolvedOrganizerV1;
@@ -1432,6 +1450,14 @@ export type OidcAuthorizationRule = {
    value: Array<Scalars['String']['output']>;
};

export type OidcConfiguration = {
    __typename?: 'OidcConfiguration';
    /** Default allowed redirect origins that apply to all OIDC providers (e.g., Tailscale domains) */
    defaultAllowedOrigins?: Maybe<Array<Scalars['String']['output']>>;
    /** List of configured OIDC providers */
    providers: Array<OidcProvider>;
};

export type OidcProvider = {
    __typename?: 'OidcProvider';
    /** OAuth2 authorization endpoint URL. If omitted, will be auto-discovered from issuer/.well-known/openid-configuration */
@@ -1455,7 +1481,7 @@ export type OidcProvider = {
    /** The unique identifier for the OIDC provider */
    id: Scalars['PrefixedID']['output'];
    /** OIDC issuer URL (e.g., https://accounts.google.com). Required for auto-discovery via /.well-known/openid-configuration */
    issuer: Scalars['String']['output'];
    issuer?: Maybe<Scalars['String']['output']>;
    /** JSON Web Key Set URI for token validation. If omitted, will be auto-discovered from issuer/.well-known/openid-configuration */
    jwksUri?: Maybe<Scalars['String']['output']>;
    /** Display name of the OIDC provider */
@@ -1654,6 +1680,8 @@ export type Query = {
    network: Network;
    /** Get all notifications */
    notifications: Notifications;
    /** Get the full OIDC configuration (admin only) */
    oidcConfiguration: OidcConfiguration;
    /** Get a specific OIDC provider by ID */
    oidcProvider?: Maybe<OidcProvider>;
    /** Get all configured OIDC providers (admin only) */
@@ -1933,6 +1961,7 @@ export type Server = Node & {
    name: Scalars['String']['output'];
    owner: ProfileModel;
    remoteurl: Scalars['String']['output'];
    /** Whether this server is online or offline */
    status: ServerStatus;
    wanip: Scalars['String']['output'];
};
@@ -2245,6 +2274,14 @@ export type UpdateSettingsResponse = {
    warnings?: Maybe<Array<Scalars['String']['output']>>;
};

/** Update status of a container. */
export enum UpdateStatus {
    REBUILD_READY = 'REBUILD_READY',
    UNKNOWN = 'UNKNOWN',
    UPDATE_AVAILABLE = 'UPDATE_AVAILABLE',
    UP_TO_DATE = 'UP_TO_DATE'
}

export type Uptime = {
    __typename?: 'Uptime';
    timestamp?: Maybe<Scalars['String']['output']>;
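The regenerated types above add `isSpinning`, `containerUpdateStatuses`, `ExplicitStatusItem`, `UpdateStatus`, and the OIDC configuration types. For orientation, a client could select the new fields along these lines; this is a sketch only, and the `disks` and `docker` root field names are assumed from the resolvers elsewhere in this changeset rather than shown in these hunks:

```ts
// Illustrative query against the regenerated schema.
const DISK_AND_DOCKER_STATUS = /* GraphQL */ `
    query DiskAndDockerStatus {
        disks {
            id
            name
            isSpinning
        }
        docker {
            containerUpdateStatuses {
                name
                updateStatus
            }
        }
    }
`;
```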
api/src/unraid-api/cli/pm2.service.spec.ts (Normal file, 76 lines)
@@ -0,0 +1,76 @@
import * as fs from 'node:fs/promises';

import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { LogService } from '@app/unraid-api/cli/log.service.js';
import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';

vi.mock('node:fs/promises');
vi.mock('execa');
vi.mock('@app/core/utils/files/file-exists.js', () => ({
    fileExists: vi.fn().mockResolvedValue(false),
}));
vi.mock('@app/environment.js', () => ({
    PATHS_LOGS_DIR: '/var/log/unraid-api',
    PM2_HOME: '/var/log/.pm2',
    PM2_PATH: '/path/to/pm2',
    ECOSYSTEM_PATH: '/path/to/ecosystem.config.json',
    SUPPRESS_LOGS: false,
    LOG_LEVEL: 'info',
}));

describe('PM2Service', () => {
    let pm2Service: PM2Service;
    let logService: LogService;
    const mockMkdir = vi.mocked(fs.mkdir);

    beforeEach(() => {
        vi.clearAllMocks();
        logService = {
            trace: vi.fn(),
            warn: vi.fn(),
            error: vi.fn(),
            log: vi.fn(),
            info: vi.fn(),
            debug: vi.fn(),
        } as unknown as LogService;
        pm2Service = new PM2Service(logService);
    });

    afterEach(() => {
        vi.restoreAllMocks();
    });

    describe('ensurePm2Dependencies', () => {
        it('should create logs directory and log that PM2 will handle its own directory', async () => {
            mockMkdir.mockResolvedValue(undefined);

            await pm2Service.ensurePm2Dependencies();

            expect(mockMkdir).toHaveBeenCalledWith('/var/log/unraid-api', { recursive: true });
            expect(mockMkdir).toHaveBeenCalledTimes(1); // Only logs directory, not PM2_HOME
            expect(logService.trace).toHaveBeenCalledWith(
                'PM2_HOME will be created at /var/log/.pm2 when PM2 daemon starts'
            );
        });

        it('should log error but not throw when logs directory creation fails', async () => {
            mockMkdir.mockRejectedValue(new Error('Disk full'));

            await expect(pm2Service.ensurePm2Dependencies()).resolves.not.toThrow();

            expect(logService.error).toHaveBeenCalledWith(
                expect.stringContaining('Failed to fully ensure PM2 dependencies: Disk full')
            );
        });

        it('should handle mkdir with recursive flag for nested logs path', async () => {
            mockMkdir.mockResolvedValue(undefined);

            await pm2Service.ensurePm2Dependencies();

            expect(mockMkdir).toHaveBeenCalledWith('/var/log/unraid-api', { recursive: true });
            expect(mockMkdir).toHaveBeenCalledTimes(1);
        });
    });
});
@@ -42,8 +42,22 @@ export class PM2Service {

    async run(context: CmdContext, ...args: string[]) {
        const { tag, raw, ...execOptions } = context;
        execOptions.extendEnv ??= false;
        // Default to true to match execa's default behavior
        execOptions.extendEnv ??= true;
        execOptions.shell ??= 'bash';

        // Ensure /usr/local/bin is in PATH for Node.js
        const currentPath = execOptions.env?.PATH || process.env.PATH || '/usr/bin:/bin:/usr/sbin:/sbin';
        const needsPathUpdate = !currentPath.includes('/usr/local/bin');
        const finalPath = needsPathUpdate ? `/usr/local/bin:${currentPath}` : currentPath;

        // Always ensure PM2_HOME is set in the environment for every PM2 command
        execOptions.env = {
            ...execOptions.env,
            PM2_HOME,
            ...(needsPathUpdate && { PATH: finalPath }),
        };

        const runCommand = () => execa(PM2_PATH, [...args], execOptions satisfies Options);
        if (raw) {
            return runCommand();
@@ -100,8 +114,20 @@ export class PM2Service {

    /**
     * Ensures that the dependencies necessary for PM2 to start and operate are present.
     * Creates PM2_HOME directory with proper permissions if it doesn't exist.
     */
    async ensurePm2Dependencies() {
        await mkdir(PATHS_LOGS_DIR, { recursive: true });
        try {
            // Create logs directory
            await mkdir(PATHS_LOGS_DIR, { recursive: true });

            // PM2 automatically creates and manages its home directory when the daemon starts
            this.logger.trace(`PM2_HOME will be created at ${PM2_HOME} when PM2 daemon starts`);
        } catch (error) {
            // Log error but don't throw - let PM2 fail with its own error messages if the setup is incomplete
            this.logger.error(
                `Failed to fully ensure PM2 dependencies: ${error instanceof Error ? error.message : error}. PM2 may encounter issues during operation.`
            );
        }
    }
}
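The PATH handling above only prepends `/usr/local/bin` when it is missing, so repeated `run()` calls stay idempotent. A condensed restatement of that merge logic in isolation (same behavior, outside the class):

```ts
// Standalone restatement of the PATH merge in PM2Service.run().
function mergePath(envPath?: string): string {
    const current = envPath || process.env.PATH || '/usr/bin:/bin:/usr/sbin:/sbin';
    return current.includes('/usr/local/bin') ? current : `/usr/local/bin:${current}`;
}

mergePath('/usr/bin:/bin'); // '/usr/local/bin:/usr/bin:/bin'
mergePath('/usr/local/bin:/usr/bin:/bin'); // unchanged
```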
@@ -1,9 +1,23 @@
import { Command, CommandRunner } from 'nest-commander';
import { Command, CommandRunner, Option } from 'nest-commander';

import { ECOSYSTEM_PATH } from '@app/environment.js';
import type { LogLevel } from '@app/core/log.js';
import { levels } from '@app/core/log.js';
import { ECOSYSTEM_PATH, LOG_LEVEL } from '@app/environment.js';
import { LogService } from '@app/unraid-api/cli/log.service.js';
import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';

export interface LogLevelOptions {
    logLevel?: LogLevel;
}

export function parseLogLevelOption(val: string, allowedLevels: string[] = [...levels]): LogLevel {
    const normalized = val.toLowerCase() as LogLevel;
    if (allowedLevels.includes(normalized)) {
        return normalized;
    }
    throw new Error(`Invalid --log-level "${val}". Allowed: ${allowedLevels.join(', ')}`);
}

@Command({ name: 'restart', description: 'Restart the Unraid API' })
export class RestartCommand extends CommandRunner {
    constructor(
@@ -13,11 +27,12 @@ export class RestartCommand extends CommandRunner {
        super();
    }

    async run(): Promise<void> {
    async run(_?: string[], options: LogLevelOptions = {}): Promise<void> {
        try {
            this.logger.info('Restarting the Unraid API...');
            const env = { LOG_LEVEL: options.logLevel };
            const { stderr, stdout } = await this.pm2.run(
                { tag: 'PM2 Restart', raw: true },
                { tag: 'PM2 Restart', raw: true, extendEnv: true, env },
                'restart',
                ECOSYSTEM_PATH,
                '--update-env'
@@ -40,4 +55,13 @@ export class RestartCommand extends CommandRunner {
            process.exit(1);
        }
    }

    @Option({
        flags: `--log-level <${levels.join('|')}>`,
        description: 'log level to use',
        defaultValue: LOG_LEVEL.toLowerCase(),
    })
    parseLogLevel(val: string): LogLevel {
        return parseLogLevelOption(val);
    }
}
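Since `parseLogLevelOption` is now shared by `restart` and `start`, its contract is worth pinning down: it lower-cases the input before the membership check and throws on unknown values instead of silently falling back. A behavior sketch, assuming `trace` and `info` are members of `levels`:

```ts
parseLogLevelOption('TRACE'); // => 'trace' (case-insensitive)
parseLogLevelOption('info'); // => 'info'
try {
    parseLogLevelOption('loud');
} catch (e) {
    // Error: Invalid --log-level "loud". Allowed: <contents of `levels`>
}
```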
@@ -1,14 +1,12 @@
import { Command, CommandRunner, Option } from 'nest-commander';

import type { LogLevel } from '@app/core/log.js';
import type { LogLevelOptions } from '@app/unraid-api/cli/restart.command.js';
import { levels } from '@app/core/log.js';
import { ECOSYSTEM_PATH } from '@app/environment.js';
import { ECOSYSTEM_PATH, LOG_LEVEL } from '@app/environment.js';
import { LogService } from '@app/unraid-api/cli/log.service.js';
import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';

interface StartCommandOptions {
    'log-level'?: string;
}
import { parseLogLevelOption } from '@app/unraid-api/cli/restart.command.js';

@Command({ name: 'start', description: 'Start the Unraid API' })
export class StartCommand extends CommandRunner {
@@ -27,17 +25,12 @@ export class StartCommand extends CommandRunner {
        await this.pm2.run({ tag: 'PM2 Delete' }, 'delete', ECOSYSTEM_PATH);
    }

    async run(_: string[], options: StartCommandOptions): Promise<void> {
    async run(_: string[], options: LogLevelOptions): Promise<void> {
        this.logger.info('Starting the Unraid API');
        await this.cleanupPM2State();

        const env: Record<string, string> = {};
        if (options['log-level']) {
            env.LOG_LEVEL = options['log-level'];
        }

        const env = { LOG_LEVEL: options.logLevel };
        const { stderr, stdout } = await this.pm2.run(
            { tag: 'PM2 Start', env, raw: true },
            { tag: 'PM2 Start', raw: true, extendEnv: true, env },
            'start',
            ECOSYSTEM_PATH,
            '--update-env'
@@ -54,9 +47,9 @@ export class StartCommand extends CommandRunner {
    @Option({
        flags: `--log-level <${levels.join('|')}>`,
        description: 'log level to use',
        defaultValue: 'info',
        defaultValue: LOG_LEVEL.toLowerCase(),
    })
    parseLogLevel(val: string): LogLevel {
        return levels.includes(val as LogLevel) ? (val as LogLevel) : 'info';
        return parseLogLevelOption(val);
    }
}
@@ -1,14 +1,37 @@
import { Command, CommandRunner } from 'nest-commander';
import { Command, CommandRunner, Option } from 'nest-commander';

import { API_VERSION } from '@app/environment.js';
import { LogService } from '@app/unraid-api/cli/log.service.js';

@Command({ name: 'version' })
interface VersionOptions {
    json?: boolean;
}

@Command({ name: 'version', description: 'Display API version information' })
export class VersionCommand extends CommandRunner {
    constructor(private readonly logger: LogService) {
        super();
    }
    async run(): Promise<void> {
        this.logger.info(`Unraid API v${API_VERSION}`);

    @Option({
        flags: '-j, --json',
        description: 'Output version information as JSON',
    })
    parseJson(): boolean {
        return true;
    }

    async run(passedParam: string[], options?: VersionOptions): Promise<void> {
        if (options?.json) {
            const [baseVersion, buildInfo] = API_VERSION.split('+');
            const versionInfo = {
                version: baseVersion || API_VERSION,
                build: buildInfo || undefined,
                combined: API_VERSION,
            };
            console.log(JSON.stringify(versionInfo));
        } else {
            this.logger.info(`Unraid API v${API_VERSION}`);
        }
    }
}
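The `--json` branch splits `API_VERSION` on `+`, so a build-suffixed version string round-trips cleanly. With an illustrative value (not taken from this diff):

```ts
const API_VERSION = '4.22.2+build.42'; // illustrative value
const [baseVersion, buildInfo] = API_VERSION.split('+');
console.log(JSON.stringify({ version: baseVersion, build: buildInfo, combined: API_VERSION }));
// {"version":"4.22.2","build":"build.42","combined":"4.22.2+build.42"}
```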
@@ -1,11 +1,11 @@
import { Module } from '@nestjs/common';
import { ScheduleModule } from '@nestjs/schedule';

import { JobModule } from '@app/unraid-api/cron/job.module.js';
import { LogRotateService } from '@app/unraid-api/cron/log-rotate.service.js';
import { WriteFlashFileService } from '@app/unraid-api/cron/write-flash-file.service.js';

@Module({
    imports: [],
    imports: [JobModule],
    providers: [WriteFlashFileService, LogRotateService],
})
export class CronModule {}
api/src/unraid-api/cron/job.module.ts (Normal file, 13 lines)
@@ -0,0 +1,13 @@
import { Module } from '@nestjs/common';
import { ScheduleModule } from '@nestjs/schedule';

/**
 * Sets up common dependencies for initializing jobs (e.g. scheduler registry, cron jobs).
 *
 * Simplifies testing setup & application dependency tree by ensuring `forRoot` is called only once.
 */
@Module({
    imports: [ScheduleModule.forRoot()],
    exports: [ScheduleModule],
})
export class JobModule {}
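Consumers now import `JobModule` instead of calling `ScheduleModule.forRoot()` themselves, which is exactly what the `CronModule` change above does. Any other module that registers cron jobs would follow the same pattern; the module below is illustrative, not part of this diff:

```ts
import { Module } from '@nestjs/common';

import { JobModule } from '@app/unraid-api/cron/job.module.js';

// Importing JobModule re-exports ScheduleModule, so forRoot() stays centralized.
@Module({
    imports: [JobModule],
})
export class MyScheduledTasksModule {}
```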
api/src/unraid-api/decorators/omit-if.decorator.spec.ts (Normal file, 172 lines)
@@ -0,0 +1,172 @@
import { Reflector } from '@nestjs/core';
import { Field, Mutation, ObjectType, Query, ResolveField, Resolver } from '@nestjs/graphql';

import { beforeEach, describe, expect, it, vi } from 'vitest';

import { OMIT_IF_METADATA_KEY, OmitIf } from '@app/unraid-api/decorators/omit-if.decorator.js';

describe('OmitIf Decorator', () => {
    let reflector: Reflector;

    beforeEach(() => {
        reflector = new Reflector();
    });

    describe('OmitIf', () => {
        it('should set metadata when condition is true', () => {
            class TestResolver {
                @OmitIf(true)
                testMethod() {
                    return 'test';
                }
            }

            const instance = new TestResolver();
            const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
            expect(metadata).toBe(true);
        });

        it('should not set metadata when condition is false', () => {
            class TestResolver {
                @OmitIf(false)
                testMethod() {
                    return 'test';
                }
            }

            const instance = new TestResolver();
            const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
            expect(metadata).toBeUndefined();
        });

        it('should evaluate function conditions', () => {
            const mockCondition = vi.fn(() => true);

            class TestResolver {
                @OmitIf(mockCondition)
                testMethod() {
                    return 'test';
                }
            }

            expect(mockCondition).toHaveBeenCalledOnce();
            const instance = new TestResolver();
            const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
            expect(metadata).toBe(true);
        });

        it('should evaluate function conditions that return false', () => {
            const mockCondition = vi.fn(() => false);

            class TestResolver {
                @OmitIf(mockCondition)
                testMethod() {
                    return 'test';
                }
            }

            expect(mockCondition).toHaveBeenCalledOnce();
            const instance = new TestResolver();
            const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
            expect(metadata).toBeUndefined();
        });

        it('should work with environment variables', () => {
            const originalEnv = process.env.NODE_ENV;
            process.env.NODE_ENV = 'production';

            class TestResolver {
                @OmitIf(process.env.NODE_ENV === 'production')
                testMethod() {
                    return 'test';
                }
            }

            const instance = new TestResolver();
            const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
            expect(metadata).toBe(true);

            process.env.NODE_ENV = originalEnv;
        });
    });

    describe('Integration with NestJS GraphQL decorators', () => {
        it('should work with @Query decorator', () => {
            @Resolver()
            class TestResolver {
                @OmitIf(true)
                @Query(() => String)
                omittedQuery() {
                    return 'test';
                }

                @OmitIf(false)
                @Query(() => String)
                includedQuery() {
                    return 'test';
                }
            }

            const instance = new TestResolver();
            const omittedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.omittedQuery);
            const includedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.includedQuery);

            expect(omittedMetadata).toBe(true);
            expect(includedMetadata).toBeUndefined();
        });

        it('should work with @Mutation decorator', () => {
            @Resolver()
            class TestResolver {
                @OmitIf(true)
                @Mutation(() => String)
                omittedMutation() {
                    return 'test';
                }

                @OmitIf(false)
                @Mutation(() => String)
                includedMutation() {
                    return 'test';
                }
            }

            const instance = new TestResolver();
            const omittedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.omittedMutation);
            const includedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.includedMutation);

            expect(omittedMetadata).toBe(true);
            expect(includedMetadata).toBeUndefined();
        });

        it('should work with @ResolveField decorator', () => {
            @ObjectType()
            class TestType {
                @Field()
                id: string = '';
            }

            @Resolver(() => TestType)
            class TestResolver {
                @OmitIf(true)
                @ResolveField(() => String)
                omittedField() {
                    return 'test';
                }

                @OmitIf(false)
                @ResolveField(() => String)
                includedField() {
                    return 'test';
                }
            }

            const instance = new TestResolver();
            const omittedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.omittedField);
            const includedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.includedField);

            expect(omittedMetadata).toBe(true);
            expect(includedMetadata).toBeUndefined();
        });
    });
});
api/src/unraid-api/decorators/omit-if.decorator.ts (Normal file, 80 lines)
@@ -0,0 +1,80 @@
import { SetMetadata } from '@nestjs/common';
import { Extensions } from '@nestjs/graphql';

import { MapperKind, mapSchema } from '@graphql-tools/utils';
import { GraphQLFieldConfig, GraphQLSchema } from 'graphql';

export const OMIT_IF_METADATA_KEY = 'omitIf';

/**
 * Decorator that conditionally omits a GraphQL field/query/mutation based on a condition.
 * The field will only be omitted from the schema when the condition evaluates to true.
 *
 * @param condition - If the condition evaluates to true, the field will be omitted from the schema
 * @returns A decorator that wraps the target field/query/mutation
 *
 * @example
 * ```typescript
 * @OmitIf(process.env.NODE_ENV === 'production')
 * @Query(() => String)
 * async debugQuery() {
 *     return 'This query is omitted in production';
 * }
 * ```
 */
export function OmitIf(condition: boolean | (() => boolean)): MethodDecorator & PropertyDecorator {
    const shouldOmit = typeof condition === 'function' ? condition() : condition;

    return (target: object, propertyKey?: string | symbol, descriptor?: PropertyDescriptor) => {
        if (shouldOmit) {
            SetMetadata(OMIT_IF_METADATA_KEY, true)(
                target,
                propertyKey as string,
                descriptor as PropertyDescriptor
            );
            Extensions({ omitIf: true })(
                target,
                propertyKey as string,
                descriptor as PropertyDescriptor
            );
        }

        return descriptor;
    };
}

/**
 * Schema transformer that omits fields/queries/mutations based on the OmitIf decorator.
 * @param schema - The GraphQL schema to transform
 * @returns The transformed GraphQL schema
 */
export function omitIfSchemaTransformer(schema: GraphQLSchema): GraphQLSchema {
    return mapSchema(schema, {
        [MapperKind.OBJECT_FIELD]: (
            fieldConfig: GraphQLFieldConfig<any, any>,
            fieldName: string,
            typeName: string
        ) => {
            const extensions = fieldConfig.extensions || {};

            if (extensions.omitIf === true) {
                return null;
            }

            return fieldConfig;
        },
        [MapperKind.ROOT_FIELD]: (
            fieldConfig: GraphQLFieldConfig<any, any>,
            fieldName: string,
            typeName: string
        ) => {
            const extensions = fieldConfig.extensions || {};

            if (extensions.omitIf === true) {
                return null;
            }

            return fieldConfig;
        },
    });
}
api/src/unraid-api/decorators/use-feature-flag.decorator.spec.ts (Normal file, 317 lines)
@@ -0,0 +1,317 @@
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-nocheck
// fixme: types don't sync with mocks, and there's no override to simplify testing.

import { Reflector } from '@nestjs/core';
import { Mutation, Query, ResolveField, Resolver } from '@nestjs/graphql';

import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { OMIT_IF_METADATA_KEY } from '@app/unraid-api/decorators/omit-if.decorator.js';
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';

// Mock the FeatureFlags
vi.mock('@app/consts.js', () => ({
    FeatureFlags: Object.freeze({
        ENABLE_NEXT_DOCKER_RELEASE: false,
        ENABLE_EXPERIMENTAL_FEATURE: true,
        ENABLE_DEBUG_MODE: false,
        ENABLE_BETA_FEATURES: true,
    }),
}));

describe('UseFeatureFlag Decorator', () => {
    let reflector: Reflector;

    beforeEach(() => {
        reflector = new Reflector();
    });

    afterEach(() => {
        vi.clearAllMocks();
    });

    describe('Basic functionality', () => {
        it('should omit field when feature flag is false', () => {
            @Resolver()
            class TestResolver {
                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @Query(() => String)
                testQuery() {
                    return 'test';
                }
            }

            const instance = new TestResolver();
            const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testQuery);
            expect(metadata).toBe(true); // Should be omitted because flag is false
        });

        it('should include field when feature flag is true', () => {
            @Resolver()
            class TestResolver {
                @UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
                @Query(() => String)
                testQuery() {
                    return 'test';
                }
            }

            const instance = new TestResolver();
            const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testQuery);
            expect(metadata).toBeUndefined(); // Should not be omitted because flag is true
        });
    });

    describe('With different decorator types', () => {
        it('should work with @Query decorator', () => {
            @Resolver()
            class TestResolver {
                @UseFeatureFlag('ENABLE_DEBUG_MODE')
                @Query(() => String)
                debugQuery() {
                    return 'debug';
                }

                @UseFeatureFlag('ENABLE_BETA_FEATURES')
                @Query(() => String)
                betaQuery() {
                    return 'beta';
                }
            }

            const instance = new TestResolver();
            const debugMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.debugQuery);
            const betaMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.betaQuery);

            expect(debugMetadata).toBe(true); // ENABLE_DEBUG_MODE is false
            expect(betaMetadata).toBeUndefined(); // ENABLE_BETA_FEATURES is true
        });

        it('should work with @Mutation decorator', () => {
            @Resolver()
            class TestResolver {
                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @Mutation(() => String)
                dockerMutation() {
                    return 'docker';
                }

                @UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
                @Mutation(() => String)
                experimentalMutation() {
                    return 'experimental';
                }
            }

            const instance = new TestResolver();
            const dockerMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.dockerMutation);
            const experimentalMetadata = reflector.get(
                OMIT_IF_METADATA_KEY,
                instance.experimentalMutation
            );

            expect(dockerMetadata).toBe(true); // ENABLE_NEXT_DOCKER_RELEASE is false
            expect(experimentalMetadata).toBeUndefined(); // ENABLE_EXPERIMENTAL_FEATURE is true
        });

        it('should work with @ResolveField decorator', () => {
            @Resolver()
            class TestResolver {
                @UseFeatureFlag('ENABLE_DEBUG_MODE')
                @ResolveField(() => String)
                debugField() {
                    return 'debug';
                }

                @UseFeatureFlag('ENABLE_BETA_FEATURES')
                @ResolveField(() => String)
                betaField() {
                    return 'beta';
                }
            }

            const instance = new TestResolver();
            const debugMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.debugField);
            const betaMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.betaField);

            expect(debugMetadata).toBe(true); // ENABLE_DEBUG_MODE is false
            expect(betaMetadata).toBeUndefined(); // ENABLE_BETA_FEATURES is true
        });
    });

    describe('Multiple decorators on same class', () => {
        it('should handle multiple feature flags independently', () => {
            @Resolver()
            class TestResolver {
                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @Query(() => String)
                dockerQuery() {
                    return 'docker';
                }

                @UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
                @Query(() => String)
                experimentalQuery() {
                    return 'experimental';
                }

                @UseFeatureFlag('ENABLE_DEBUG_MODE')
                @Query(() => String)
                debugQuery() {
                    return 'debug';
                }

                @UseFeatureFlag('ENABLE_BETA_FEATURES')
                @Query(() => String)
                betaQuery() {
                    return 'beta';
                }
            }

            const instance = new TestResolver();

            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.dockerQuery)).toBe(true);
            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.experimentalQuery)).toBeUndefined();
            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.debugQuery)).toBe(true);
            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.betaQuery)).toBeUndefined();
        });
    });

    describe('Type safety', () => {
        it('should only accept valid feature flag keys', () => {
            // This test verifies TypeScript compile-time type safety
            // The following would cause a TypeScript error if uncommented:
            // @UseFeatureFlag('INVALID_FLAG')

            @Resolver()
            class TestResolver {
                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @Query(() => String)
                validQuery() {
                    return 'valid';
                }
            }

            const instance = new TestResolver();
            expect(instance.validQuery).toBeDefined();
        });
    });

    describe('Integration scenarios', () => {
        it('should work correctly with other decorators', () => {
            const customDecorator = (
                target: any,
                propertyKey: string | symbol,
                descriptor: PropertyDescriptor
            ) => {
                Reflect.defineMetadata('custom', true, target, propertyKey);
                return descriptor;
            };

            @Resolver()
            class TestResolver {
                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @customDecorator
                @Query(() => String)
                multiDecoratorQuery() {
                    return 'multi';
                }
            }

            const instance = new TestResolver();
            const omitMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.multiDecoratorQuery);
            const customMetadata = Reflect.getMetadata('custom', instance, 'multiDecoratorQuery');

            expect(omitMetadata).toBe(true);
            expect(customMetadata).toBe(true);
        });

        it('should maintain correct decorator order', () => {
            const orderTracker: string[] = [];

            const trackingDecorator = (name: string) => {
                return (target: any, propertyKey: string | symbol, descriptor: PropertyDescriptor) => {
                    orderTracker.push(name);
                    return descriptor;
                };
            };

            @Resolver()
            class TestResolver {
                @trackingDecorator('first')
                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @trackingDecorator('last')
                @Query(() => String)
                orderedQuery() {
                    return 'ordered';
                }
            }

            // Decorators are applied bottom-up
            expect(orderTracker).toEqual(['last', 'first']);
        });
    });

    describe('Real-world usage patterns', () => {
        it('should work with Docker resolver pattern', () => {
            @Resolver()
            class DockerResolver {
                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @Mutation(() => String)
                async createDockerFolder(name: string) {
                    return `Created folder: ${name}`;
                }

                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @Mutation(() => String)
                async deleteDockerEntries(entryIds: string[]) {
                    return `Deleted entries: ${entryIds.join(', ')}`;
                }

                @Query(() => String)
                async getDockerInfo() {
                    return 'Docker info';
                }
            }

            const instance = new DockerResolver();

            // Feature flag is false, so these should be omitted
            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.createDockerFolder)).toBe(true);
            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.deleteDockerEntries)).toBe(true);

            // No feature flag, so this should not be omitted
            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.getDockerInfo)).toBeUndefined();
        });

        it('should handle mixed feature flags in same resolver', () => {
            @Resolver()
            class MixedResolver {
                @UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
                @Query(() => String)
                experimentalQuery() {
                    return 'experimental';
                }

                @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
                @Query(() => String)
                dockerQuery() {
                    return 'docker';
                }

                @UseFeatureFlag('ENABLE_BETA_FEATURES')
                @Mutation(() => String)
                betaMutation() {
                    return 'beta';
                }
            }

            const instance = new MixedResolver();

            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.experimentalQuery)).toBeUndefined();
            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.dockerQuery)).toBe(true);
            expect(reflector.get(OMIT_IF_METADATA_KEY, instance.betaMutation)).toBeUndefined();
        });
    });
});
api/src/unraid-api/decorators/use-feature-flag.decorator.ts (Normal file, 22 lines)
@@ -0,0 +1,22 @@
import { FeatureFlags } from '@app/consts.js';
import { OmitIf } from '@app/unraid-api/decorators/omit-if.decorator.js';

/**
 * Decorator that conditionally includes a GraphQL field/query/mutation based on a feature flag.
 * The field will only be included in the schema when the feature flag is enabled.
 *
 * @param flagKey - The key of the feature flag in FeatureFlags
 * @returns A decorator that wraps OmitIf
 *
 * @example
 * ```typescript
 * @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
 * @Mutation(() => String)
 * async experimentalMutation() {
 *     return 'This mutation is only available when ENABLE_NEXT_DOCKER_RELEASE is true';
 * }
 * ```
 */
export function UseFeatureFlag(flagKey: keyof typeof FeatureFlags): MethodDecorator & PropertyDecorator {
    return OmitIf(!FeatureFlags[flagKey]);
}
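Because `flagKey` is typed as `keyof typeof FeatureFlags`, unknown flag names fail at compile time. The flags object itself is expected to look roughly like this; a sketch of the assumed shape in `@app/consts.js`, mirroring the pattern the spec's mock uses:

```ts
// Assumed shape of FeatureFlags in @app/consts.js (not shown in this diff).
export const FeatureFlags = Object.freeze({
    ENABLE_NEXT_DOCKER_RELEASE: false,
});

// Compile-time safety: the following would not type-check.
// @UseFeatureFlag('TYPO_FLAG') // error: not assignable to keyof typeof FeatureFlags
```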
@@ -12,6 +12,7 @@ import { NoUnusedVariablesRule } from 'graphql';

import { ENVIRONMENT } from '@app/environment.js';
import { ApiConfigModule } from '@app/unraid-api/config/api-config.module.js';
import { omitIfSchemaTransformer } from '@app/unraid-api/decorators/omit-if.decorator.js';

// Import enum registrations to ensure they're registered with GraphQL
import '@app/unraid-api/graph/auth/auth-action.enum.js';
@@ -64,7 +65,12 @@ import { PluginModule } from '@app/unraid-api/plugin/plugin.module.js';
            },
            // Only add transform when not in test environment to avoid GraphQL version conflicts
            transformSchema:
                process.env.NODE_ENV === 'test' ? undefined : usePermissionsSchemaTransformer,
                process.env.NODE_ENV === 'test'
                    ? undefined
                    : (schema) => {
                          const schemaWithPermissions = usePermissionsSchemaTransformer(schema);
                          return omitIfSchemaTransformer(schemaWithPermissions);
                      },
            validationRules: [NoUnusedVariablesRule],
        };
    },
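The inline closure runs the permissions transformer first and the omit-if pruning second. It is equivalent to this named composition; an illustrative refactor, not part of the diff, with imports as in the module above:

```ts
import { GraphQLSchema } from 'graphql';

// Same behavior as the inline closure: permissions first, then omit-if pruning.
const composedTransform = (schema: GraphQLSchema): GraphQLSchema =>
    omitIfSchemaTransformer(usePermissionsSchemaTransformer(schema));
```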
@@ -126,6 +126,9 @@ export class ArrayDisk extends Node {

    @Field(() => ArrayDiskFsColor, { nullable: true })
    color?: ArrayDiskFsColor | null;

    @Field(() => Boolean, { nullable: true, description: 'Whether the disk is currently spinning' })
    isSpinning?: boolean | null;
}

@ObjectType({
@@ -3,7 +3,15 @@ import { Field, ObjectType, registerEnumType } from '@nestjs/graphql';
import { Node } from '@unraid/shared/graphql.model.js';
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';
import { Type } from 'class-transformer';
import { IsArray, IsEnum, IsNumber, IsOptional, IsString, ValidateNested } from 'class-validator';
import {
    IsArray,
    IsBoolean,
    IsEnum,
    IsNumber,
    IsOptional,
    IsString,
    ValidateNested,
} from 'class-validator';

export enum DiskFsType {
    XFS = 'XFS',
@@ -136,4 +144,8 @@ export class Disk extends Node {
    @ValidateNested({ each: true })
    @Type(() => DiskPartition)
    partitions!: DiskPartition[];

    @Field(() => Boolean, { description: 'Whether the disk is spinning or not' })
    @IsBoolean()
    isSpinning!: boolean;
}
@@ -66,6 +66,7 @@ describe('DisksResolver', () => {
                smartStatus: DiskSmartStatus.OK,
                temperature: -1,
                partitions: [],
                isSpinning: false,
            },
        ];
        mockDisksService.getDisks.mockResolvedValue(mockResult);
@@ -92,6 +93,7 @@ describe('DisksResolver', () => {
        const mockDisk: Disk = {
            id: 'SERIAL123',
            device: '/dev/sda',
            isSpinning: false,
            type: 'SSD',
            name: 'Samsung SSD 860 EVO 1TB',
            vendor: 'Samsung',

@@ -33,4 +33,9 @@ export class DisksResolver {
    public async temperature(@Parent() disk: Disk) {
        return this.disksService.getTemperature(disk.device);
    }

    @ResolveField(() => Boolean)
    public async isSpinning(@Parent() disk: Disk) {
        return disk.isSpinning;
    }
}
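Unlike `temperature`, the new resolve field simply returns the parent's value, so selecting it costs no extra device I/O. Once exposed, clients can select it per disk; an illustrative fragment, with the type name taken from the `Disk` model above:

```ts
// Illustrative client-side selection of the new field.
const DISK_SPIN_FRAGMENT = /* GraphQL */ `
    fragment DiskSpinState on Disk {
        id
        isSpinning
    }
`;
```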
@@ -1,11 +1,17 @@
import { ConfigService } from '@nestjs/config';
import { Test, TestingModule } from '@nestjs/testing';

import type { Systeminformation } from 'systeminformation';
import { execa } from 'execa';
import { blockDevices, diskLayout } from 'systeminformation';
// Vitest imports
import { beforeEach, describe, expect, it, Mock, MockedFunction, vi } from 'vitest';
import { beforeEach, describe, expect, it, vi } from 'vitest';

import {
    ArrayDisk,
    ArrayDiskStatus,
    ArrayDiskType,
} from '@app/unraid-api/graph/resolvers/array/array.model.js';
import {
    Disk,
    DiskFsType,

@@ -33,6 +39,86 @@ const mockBatchProcess = batchProcess as any;

describe('DisksService', () => {
    let service: DisksService;
    let configService: ConfigService;

    // Mock ArrayDisk data from state
    const mockArrayDisks: ArrayDisk[] = [
        {
            id: 'S4ENNF0N123456',
            device: 'sda',
            name: 'cache',
            size: 512110190592,
            idx: 30,
            type: ArrayDiskType.CACHE,
            status: ArrayDiskStatus.DISK_OK,
            isSpinning: null, // NVMe/SSD doesn't spin
            rotational: false,
            exportable: false,
            numErrors: 0,
            numReads: 1000,
            numWrites: 2000,
            temp: 42,
            comment: 'NVMe Cache',
            format: 'GPT: 4KiB-aligned',
            fsType: 'btrfs',
            transport: 'nvme',
            warning: null,
            critical: null,
            fsFree: null,
            fsSize: null,
            fsUsed: null,
        },
        {
            id: 'WD-WCC7K7YL9876',
            device: 'sdb',
            name: 'disk1',
            size: 4000787030016,
            idx: 1,
            type: ArrayDiskType.DATA,
            status: ArrayDiskStatus.DISK_OK,
            isSpinning: true, // Currently spinning
            rotational: true,
            exportable: false,
            numErrors: 0,
            numReads: 5000,
            numWrites: 3000,
            temp: 35,
            comment: 'Data Disk 1',
            format: 'GPT: 4KiB-aligned',
            fsType: 'xfs',
            transport: 'sata',
            warning: null,
            critical: null,
            fsFree: 1000000000,
            fsSize: 4000000000,
            fsUsed: 3000000000,
        },
        {
            id: 'WD-SPUNDOWN123',
            device: 'sdd',
            name: 'disk2',
            size: 4000787030016,
            idx: 2,
            type: ArrayDiskType.DATA,
            status: ArrayDiskStatus.DISK_OK,
            isSpinning: false, // Spun down
            rotational: true,
            exportable: false,
            numErrors: 0,
            numReads: 3000,
            numWrites: 1000,
            temp: 30,
            comment: 'Data Disk 2 (spun down)',
            format: 'GPT: 4KiB-aligned',
            fsType: 'xfs',
            transport: 'sata',
            warning: null,
            critical: null,
            fsFree: 2000000000,
            fsSize: 4000000000,
            fsUsed: 2000000000,
        },
    ];

    const mockDiskLayoutData: Systeminformation.DiskLayoutData[] = [
        {

@@ -92,6 +178,25 @@ describe('DisksService', () => {
            smartStatus: 'unknown', // Simulate unknown status
            temperature: null,
        },
        {
            device: '/dev/sdd',
            type: 'HD',
            name: 'WD Spun Down',
            vendor: 'Western Digital',
            size: 4000787030016,
            bytesPerSector: 512,
            totalCylinders: 486401,
            totalHeads: 255,
            totalSectors: 7814037168,
            totalTracks: 124032255,
            tracksPerCylinder: 255,
            sectorsPerTrack: 63,
            firmwareRevision: '82.00A82',
            serialNum: 'WD-SPUNDOWN123',
            interfaceType: 'SATA',
            smartStatus: 'Ok',
            temperature: null,
        },
    ];

    const mockBlockDeviceData: Systeminformation.BlockDevicesData[] = [

@@ -174,17 +279,50 @@ describe('DisksService', () => {
            protocol: 'SATA', // Assume SATA even if interface type unknown for disk
            identifier: '/dev/sdc1',
        },
        // Partition for sdd
        {
            name: 'sdd1',
            type: 'part',
            fsType: 'xfs',
            mount: '/mnt/disk2',
            size: 4000787030016,
            physical: 'HDD',
            uuid: 'UUID-SDD1',
            label: 'Data2',
            model: 'WD Spun Down',
            serial: 'WD-SPUNDOWN123',
            removable: false,
            protocol: 'SATA',
            identifier: '/dev/sdd1',
        },
    ];

    beforeEach(async () => {
        // Reset mocks before each test using vi
        vi.clearAllMocks();

        // Create mock ConfigService
        const mockConfigService = {
            get: vi.fn().mockImplementation((key: string, defaultValue?: any) => {
                if (key === 'store.emhttp.disks') {
                    return mockArrayDisks;
                }
                return defaultValue;
            }),
        };

        const module: TestingModule = await Test.createTestingModule({
            providers: [DisksService],
            providers: [
                DisksService,
                {
                    provide: ConfigService,
                    useValue: mockConfigService,
                },
            ],
        }).compile();

        service = module.get<DisksService>(DisksService);
        configService = module.get<ConfigService>(ConfigService);

        // Setup default mock implementations
        mockDiskLayout.mockResolvedValue(mockDiskLayoutData);

@@ -207,46 +345,112 @@ describe('DisksService', () => {
    // --- Test getDisks ---

    describe('getDisks', () => {
        it('should return disks without temperature', async () => {
        it('should return disks with spinning state from store', async () => {
            const disks = await service.getDisks();

            expect(mockDiskLayout).toHaveBeenCalledTimes(1);
            expect(mockBlockDevices).toHaveBeenCalledTimes(1);
            expect(mockExeca).not.toHaveBeenCalled(); // Temperature should not be fetched
            expect(mockBatchProcess).toHaveBeenCalledTimes(1); // Still uses batchProcess for parsing
            expect(configService.get).toHaveBeenCalledWith('store.emhttp.disks', []);
            expect(mockBatchProcess).toHaveBeenCalledTimes(1);

            expect(disks).toHaveLength(mockDiskLayoutData.length);
            expect(disks[0]).toMatchObject({
                id: 'S4ENNF0N123456',
                device: '/dev/sda',
                type: 'HD',
                name: 'SAMSUNG MZVLB512HBJQ-000L7',
                vendor: 'Samsung',
                size: 512110190592,
                interfaceType: DiskInterfaceType.PCIE,
                smartStatus: DiskSmartStatus.OK,
                temperature: null, // Temperature is now null by default
                partitions: [
                    { name: 'sda1', fsType: DiskFsType.VFAT, size: 536870912 },
                    { name: 'sda2', fsType: DiskFsType.EXT4, size: 511560000000 },
                ],

            // Check NVMe disk with null spinning state
            const nvmeDisk = disks.find((d) => d.id === 'S4ENNF0N123456');
            expect(nvmeDisk).toBeDefined();
            expect(nvmeDisk?.isSpinning).toBe(false); // null from state defaults to false
            expect(nvmeDisk?.interfaceType).toBe(DiskInterfaceType.PCIE);
            expect(nvmeDisk?.smartStatus).toBe(DiskSmartStatus.OK);
            expect(nvmeDisk?.partitions).toHaveLength(2);

            // Check spinning disk
            const spinningDisk = disks.find((d) => d.id === 'WD-WCC7K7YL9876');
            expect(spinningDisk).toBeDefined();
            expect(spinningDisk?.isSpinning).toBe(true); // From state
            expect(spinningDisk?.interfaceType).toBe(DiskInterfaceType.SATA);

            // Check spun down disk
            const spunDownDisk = disks.find((d) => d.id === 'WD-SPUNDOWN123');
            expect(spunDownDisk).toBeDefined();
            expect(spunDownDisk?.isSpinning).toBe(false); // From state

            // Check disk not in state (defaults to not spinning)
            const unknownDisk = disks.find((d) => d.id === 'OTHER-SERIAL-123');
            expect(unknownDisk).toBeDefined();
            expect(unknownDisk?.isSpinning).toBe(false); // Not in state, defaults to false
            expect(unknownDisk?.interfaceType).toBe(DiskInterfaceType.UNKNOWN);
            expect(unknownDisk?.smartStatus).toBe(DiskSmartStatus.UNKNOWN);
        });

        it('should handle empty state gracefully', async () => {
            vi.mocked(configService.get).mockImplementation((key: string, defaultValue?: any) => {
                if (key === 'store.emhttp.disks') {
                    return [];
                }
                return defaultValue;
            });
            expect(disks[1]).toMatchObject({
                id: 'WD-WCC7K7YL9876',
                device: '/dev/sdb',
                interfaceType: DiskInterfaceType.SATA,
                smartStatus: DiskSmartStatus.OK,
                temperature: null,
                partitions: [{ name: 'sdb1', fsType: DiskFsType.XFS, size: 4000787030016 }],

            const disks = await service.getDisks();

            // All disks should default to not spinning when state is empty
            expect(disks).toHaveLength(mockDiskLayoutData.length);
            disks.forEach((disk) => {
                expect(disk.isSpinning).toBe(false);
            });
            expect(disks[2]).toMatchObject({
                id: 'OTHER-SERIAL-123',
                device: '/dev/sdc',
                interfaceType: DiskInterfaceType.UNKNOWN,
                smartStatus: DiskSmartStatus.UNKNOWN,
                temperature: null,
                partitions: [{ name: 'sdc1', fsType: DiskFsType.NTFS, size: 1000204886016 }],
            });

        it('should handle trimmed serial numbers correctly', async () => {
            // Add disk with spaces in ID
            const disksWithSpaces = [...mockArrayDisks];
            disksWithSpaces[0] = {
                ...disksWithSpaces[0],
                id: ' S4ENNF0N123456 ', // spaces around ID
            };

            vi.mocked(configService.get).mockImplementation((key: string, defaultValue?: any) => {
                if (key === 'store.emhttp.disks') {
                    return disksWithSpaces;
                }
                return defaultValue;
            });

            const disks = await service.getDisks();
            const disk = disks.find((d) => d.id === 'S4ENNF0N123456');

            expect(disk).toBeDefined();
            expect(disk?.isSpinning).toBe(false); // null becomes false
        });

        it('should correctly map partitions to disks', async () => {
            const disks = await service.getDisks();

            const disk1 = disks.find((d) => d.id === 'S4ENNF0N123456');
            expect(disk1?.partitions).toHaveLength(2);
            expect(disk1?.partitions[0]).toEqual({
                name: 'sda1',
                fsType: DiskFsType.VFAT,
                size: 536870912,
            });
            expect(disk1?.partitions[1]).toEqual({
                name: 'sda2',
                fsType: DiskFsType.EXT4,
                size: 511560000000,
            });

            const disk2 = disks.find((d) => d.id === 'WD-WCC7K7YL9876');
            expect(disk2?.partitions).toHaveLength(1);
            expect(disk2?.partitions[0]).toEqual({
                name: 'sdb1',
                fsType: DiskFsType.XFS,
                size: 4000787030016,
            });
        });

        it('should use ConfigService to get state data', async () => {
            await service.getDisks();

            // Verify we're accessing the state through ConfigService
            expect(configService.get).toHaveBeenCalledWith('store.emhttp.disks', []);
        });

        it('should handle empty disk layout or block devices', async () => {

@@ -267,6 +471,31 @@ describe('DisksService', () => {
        });
    });

    // --- Test getDisk ---
    describe('getDisk', () => {
        it('should return a specific disk by id', async () => {
            const disk = await service.getDisk('S4ENNF0N123456');

            expect(disk).toBeDefined();
            expect(disk.id).toBe('S4ENNF0N123456');
            expect(disk.isSpinning).toBe(false); // null becomes false
        });

        it('should return spinning disk correctly', async () => {
            const disk = await service.getDisk('WD-WCC7K7YL9876');

            expect(disk).toBeDefined();
            expect(disk.id).toBe('WD-WCC7K7YL9876');
            expect(disk.isSpinning).toBe(true);
        });

        it('should throw NotFoundException for non-existent disk', async () => {
            await expect(service.getDisk('NONEXISTENT')).rejects.toThrow(
                'Disk with id NONEXISTENT not found'
            );
        });
    });

    // --- Test getTemperature ---
    describe('getTemperature', () => {
        it('should return temperature for a disk', async () => {
@@ -1,9 +1,11 @@
import { Injectable, NotFoundException } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';

import type { Systeminformation } from 'systeminformation';
import { execa } from 'execa';
import { blockDevices, diskLayout } from 'systeminformation';

import { ArrayDisk } from '@app/unraid-api/graph/resolvers/array/array.model.js';
import {
    Disk,
    DiskFsType,

@@ -14,6 +16,7 @@ import { batchProcess } from '@app/utils.js';

@Injectable()
export class DisksService {
    constructor(private readonly configService: ConfigService) {}
    public async getTemperature(device: string): Promise<number | null> {
        try {
            const { stdout } = await execa('smartctl', ['-A', device]);

@@ -51,7 +54,8 @@ export class DisksService {

    private async parseDisk(
        disk: Systeminformation.DiskLayoutData,
        partitionsToParse: Systeminformation.BlockDevicesData[]
        partitionsToParse: Systeminformation.BlockDevicesData[],
        arrayDisks: ArrayDisk[]
    ): Promise<Omit<Disk, 'temperature'>> {
        const partitions = partitionsToParse
            // Only get partitions from this disk

@@ -115,6 +119,8 @@ export class DisksService {
            mappedInterfaceType = DiskInterfaceType.UNKNOWN;
        }

        const arrayDisk = arrayDisks.find((d) => d.id.trim() === disk.serialNum.trim());

        return {
            ...disk,
            id: disk.serialNum, // Ensure id is set

@@ -123,6 +129,7 @@ export class DisksService {
                DiskSmartStatus.UNKNOWN,
            interfaceType: mappedInterfaceType,
            partitions,
            isSpinning: arrayDisk?.isSpinning ?? false,
        };
    }

@@ -133,9 +140,9 @@ export class DisksService {
        const partitions = await blockDevices().then((devices) =>
            devices.filter((device) => device.type === 'part')
        );

        const arrayDisks = this.configService.get<ArrayDisk[]>('store.emhttp.disks', []);
        const { data } = await batchProcess(await diskLayout(), async (disk) =>
            this.parseDisk(disk, partitions)
            this.parseDisk(disk, partitions, arrayDisks)
        );
        return data;
    }
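The mapping above hinges on trimming serials on both sides before comparing, with a null spin state and a missing state entry both collapsing to false. A standalone sketch of that lookup, with types simplified and names hypothetical:

// Minimal sketch of the spin-state lookup in parseDisk above: match an emhttp
// state entry to a systeminformation disk by trimmed serial number.
interface StateDisk {
    id: string;
    isSpinning: boolean | null;
}

function resolveSpinState(serialNum: string, arrayDisks: StateDisk[]): boolean {
    const match = arrayDisks.find((d) => d.id.trim() === serialNum.trim());
    // null (NVMe/SSD) and "not found in state" both default to false
    return match?.isSpinning ?? false;
}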
@@ -0,0 +1,47 @@
import { Injectable, Logger, OnApplicationBootstrap } from '@nestjs/common';
import { SchedulerRegistry, Timeout } from '@nestjs/schedule';

import { CronJob } from 'cron';

import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';

@Injectable()
export class ContainerStatusJob implements OnApplicationBootstrap {
    private readonly logger = new Logger(ContainerStatusJob.name);
    constructor(
        private readonly dockerManifestService: DockerManifestService,
        private readonly schedulerRegistry: SchedulerRegistry,
        private readonly dockerConfigService: DockerConfigService
    ) {}

    /**
     * Initialize cron job for refreshing the update status for all containers on a user-configurable schedule.
     */
    onApplicationBootstrap() {
        if (!this.dockerConfigService.enabled()) return;
        const cronExpression = this.dockerConfigService.getConfig().updateCheckCronSchedule;
        const cronJob = CronJob.from({
            cronTime: cronExpression,
            onTick: () => {
                this.dockerManifestService.refreshDigests().catch((error) => {
                    this.logger.warn(error, 'Failed to refresh container update status');
                });
            },
            start: true,
        });
        this.schedulerRegistry.addCronJob(ContainerStatusJob.name, cronJob);
        this.logger.verbose(
            `Initialized cron job for refreshing container update status: ${ContainerStatusJob.name}`
        );
    }

    /**
     * Refresh container digests 5 seconds after application start.
     */
    @Timeout(5_000)
    async refreshContainerDigestsAfterStartup() {
        if (!this.dockerConfigService.enabled()) return;
        await this.dockerManifestService.refreshDigests();
    }
}
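Outside of Nest, the same scheduling pattern reduces to a CronJob whose tick fires a fire-and-forget refresh. A minimal sketch using the same `cron` package API as above; the refresh function is a stand-in for DockerManifestService.refreshDigests, not the real implementation:

import { CronJob } from 'cron';

// Stand-in for DockerManifestService.refreshDigests(); assumed to resolve to a boolean.
async function refreshDigests(): Promise<boolean> {
    return true;
}

// Same wiring as onApplicationBootstrap above: user-supplied schedule, failures
// logged rather than thrown, job started immediately.
const job = CronJob.from({
    cronTime: '0 6 * * *', // e.g. the EVERY_DAY_AT_6AM default
    onTick: () => {
        refreshDigests().catch((err) => console.warn('refresh failed', err));
    },
    start: true,
});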
@@ -0,0 +1,7 @@
import { Field, ObjectType } from '@nestjs/graphql';

@ObjectType()
export class DockerConfig {
    @Field(() => String)
    updateCheckCronSchedule!: string;
}
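Given this single-field model, the persisted docker.config.json presumably reduces to one key; an assumed example (compare the service's defaultConfig later in this diff):

// Assumed shape of the persisted docker.config.json:
const exampleConfig: DockerConfig = {
    updateCheckCronSchedule: '0 6 * * *',
};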
@@ -0,0 +1,195 @@
import { ConfigService } from '@nestjs/config';
import { CronExpression } from '@nestjs/schedule';
import { Test, TestingModule } from '@nestjs/testing';

import { ValidationError } from 'class-validator';
import { beforeEach, describe, expect, it, vi } from 'vitest';

import { AppError } from '@app/core/errors/app-error.js';
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';

vi.mock('cron', () => ({
    validateCronExpression: vi.fn(),
}));

vi.mock('@app/unraid-api/graph/resolvers/validation.utils.js', () => ({
    validateObject: vi.fn(),
}));

describe('DockerConfigService - validate', () => {
    let service: DockerConfigService;

    beforeEach(async () => {
        const module: TestingModule = await Test.createTestingModule({
            providers: [
                DockerConfigService,
                {
                    provide: ConfigService,
                    useValue: {
                        get: vi.fn(),
                    },
                },
            ],
        }).compile();

        service = module.get<DockerConfigService>(DockerConfigService);
        vi.clearAllMocks();
    });

    describe('validate', () => {
        it('should validate and return docker config for valid cron expression', async () => {
            const inputConfig = { updateCheckCronSchedule: '0 6 * * *' };
            const validatedConfig = { updateCheckCronSchedule: '0 6 * * *' };

            const { validateObject } = await import(
                '@app/unraid-api/graph/resolvers/validation.utils.js'
            );
            const { validateCronExpression } = await import('cron');

            vi.mocked(validateObject).mockResolvedValue(validatedConfig);
            vi.mocked(validateCronExpression).mockReturnValue({ valid: true });

            const result = await service.validate(inputConfig);

            expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
            expect(validateCronExpression).toHaveBeenCalledWith('0 6 * * *');
            expect(result).toBe(validatedConfig);
        });

        it('should validate and return docker config for predefined cron expression', async () => {
            const inputConfig = { updateCheckCronSchedule: CronExpression.EVERY_DAY_AT_6AM };
            const validatedConfig = { updateCheckCronSchedule: CronExpression.EVERY_DAY_AT_6AM };

            const { validateObject } = await import(
                '@app/unraid-api/graph/resolvers/validation.utils.js'
            );
            const { validateCronExpression } = await import('cron');

            vi.mocked(validateObject).mockResolvedValue(validatedConfig);
            vi.mocked(validateCronExpression).mockReturnValue({ valid: true });

            const result = await service.validate(inputConfig);

            expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
            expect(validateCronExpression).toHaveBeenCalledWith(CronExpression.EVERY_DAY_AT_6AM);
            expect(result).toBe(validatedConfig);
        });

        it('should throw AppError for invalid cron expression', async () => {
            const inputConfig = { updateCheckCronSchedule: 'invalid-cron' };
            const validatedConfig = { updateCheckCronSchedule: 'invalid-cron' };

            const { validateObject } = await import(
                '@app/unraid-api/graph/resolvers/validation.utils.js'
            );
            const { validateCronExpression } = await import('cron');

            vi.mocked(validateObject).mockResolvedValue(validatedConfig);
            vi.mocked(validateCronExpression).mockReturnValue({ valid: false });

            await expect(service.validate(inputConfig)).rejects.toThrow(
                new AppError('Cron expression not supported: invalid-cron')
            );

            expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
            expect(validateCronExpression).toHaveBeenCalledWith('invalid-cron');
        });

        it('should throw AppError for empty cron expression', async () => {
            const inputConfig = { updateCheckCronSchedule: '' };
            const validatedConfig = { updateCheckCronSchedule: '' };

            const { validateObject } = await import(
                '@app/unraid-api/graph/resolvers/validation.utils.js'
            );
            const { validateCronExpression } = await import('cron');

            vi.mocked(validateObject).mockResolvedValue(validatedConfig);
            vi.mocked(validateCronExpression).mockReturnValue({ valid: false });

            await expect(service.validate(inputConfig)).rejects.toThrow(
                new AppError('Cron expression not supported: ')
            );

            expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
            expect(validateCronExpression).toHaveBeenCalledWith('');
        });

        it('should throw AppError for malformed cron expression', async () => {
            const inputConfig = { updateCheckCronSchedule: '* * * *' };
            const validatedConfig = { updateCheckCronSchedule: '* * * *' };

            const { validateObject } = await import(
                '@app/unraid-api/graph/resolvers/validation.utils.js'
            );
            const { validateCronExpression } = await import('cron');

            vi.mocked(validateObject).mockResolvedValue(validatedConfig);
            vi.mocked(validateCronExpression).mockReturnValue({ valid: false });

            await expect(service.validate(inputConfig)).rejects.toThrow(
                new AppError('Cron expression not supported: * * * *')
            );

            expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
            expect(validateCronExpression).toHaveBeenCalledWith('* * * *');
        });

        it('should propagate validation errors from validateObject', async () => {
            const inputConfig = { updateCheckCronSchedule: '0 6 * * *' };
            const validationError = new ValidationError();
            validationError.property = 'updateCheckCronSchedule';

            const { validateObject } = await import(
                '@app/unraid-api/graph/resolvers/validation.utils.js'
            );

            vi.mocked(validateObject).mockRejectedValue(validationError);

            await expect(service.validate(inputConfig)).rejects.toThrow();

            expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
        });

        it('should handle complex valid cron expressions', async () => {
            const inputConfig = { updateCheckCronSchedule: '0 0,12 * * 1-5' };
            const validatedConfig = { updateCheckCronSchedule: '0 0,12 * * 1-5' };

            const { validateObject } = await import(
                '@app/unraid-api/graph/resolvers/validation.utils.js'
            );
            const { validateCronExpression } = await import('cron');

            vi.mocked(validateObject).mockResolvedValue(validatedConfig);
            vi.mocked(validateCronExpression).mockReturnValue({ valid: true });

            const result = await service.validate(inputConfig);

            expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
            expect(validateCronExpression).toHaveBeenCalledWith('0 0,12 * * 1-5');
            expect(result).toBe(validatedConfig);
        });

        it('should handle input with extra properties', async () => {
            const inputConfig = {
                updateCheckCronSchedule: '0 6 * * *',
                extraProperty: 'should be ignored',
            };
            const validatedConfig = { updateCheckCronSchedule: '0 6 * * *' };

            const { validateObject } = await import(
                '@app/unraid-api/graph/resolvers/validation.utils.js'
            );
            const { validateCronExpression } = await import('cron');

            vi.mocked(validateObject).mockResolvedValue(validatedConfig);
            vi.mocked(validateCronExpression).mockReturnValue({ valid: true });

            const result = await service.validate(inputConfig);

            expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
            expect(validateCronExpression).toHaveBeenCalledWith('0 6 * * *');
            expect(result).toBe(validatedConfig);
        });
    });
});
@@ -1,59 +1,45 @@
import { Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { CronExpression } from '@nestjs/schedule';

import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';
import { validateCronExpression } from 'cron';

import { FeatureFlags } from '@app/consts.js';
import { AppError } from '@app/core/errors/app-error.js';
import { DockerConfig } from '@app/unraid-api/graph/resolvers/docker/docker-config.model.js';
import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';
import {
    DEFAULT_ORGANIZER_ROOT_ID,
    DEFAULT_ORGANIZER_VIEW_ID,
} from '@app/unraid-api/organizer/organizer.js';
import { OrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
import { validateOrganizerIntegrity } from '@app/unraid-api/organizer/organizer.validation.js';

@Injectable()
export class DockerConfigService extends ConfigFilePersister<OrganizerV1> {
export class DockerConfigService extends ConfigFilePersister<DockerConfig> {
    constructor(configService: ConfigService) {
        super(configService);
    }

    enabled(): boolean {
        return FeatureFlags.ENABLE_NEXT_DOCKER_RELEASE;
    }

    configKey(): string {
        return 'dockerOrganizer';
        return 'docker';
    }

    fileName(): string {
        return 'docker.organizer.json';
        return 'docker.config.json';
    }

    defaultConfig(): OrganizerV1 {
    defaultConfig(): DockerConfig {
        return {
            version: 1,
            resources: {},
            views: {
                default: {
                    id: DEFAULT_ORGANIZER_VIEW_ID,
                    name: 'Default',
                    root: DEFAULT_ORGANIZER_ROOT_ID,
                    entries: {
                        root: {
                            type: 'folder',
                            id: DEFAULT_ORGANIZER_ROOT_ID,
                            name: 'Root',
                            children: [],
                        },
                    },
                },
            },
            updateCheckCronSchedule: CronExpression.EVERY_DAY_AT_6AM,
        };
    }

    async validate(config: object): Promise<OrganizerV1> {
        const organizer = await validateObject(OrganizerV1, config);
        const { isValid, errors } = await validateOrganizerIntegrity(organizer);
        if (!isValid) {
            throw new AppError(`Docker organizer validation failed: ${JSON.stringify(errors, null, 2)}`);
    async validate(config: object): Promise<DockerConfig> {
        const dockerConfig = await validateObject(DockerConfig, config);
        const cronExpression = validateCronExpression(dockerConfig.updateCheckCronSchedule);
        if (!cronExpression.valid) {
            throw new AppError(`Cron expression not supported: ${dockerConfig.updateCheckCronSchedule}`);
        }
        return organizer;
        return dockerConfig;
    }
}
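To summarize the reworked validate(): class-validator shape checking runs first via validateObject, then the cron expression itself is checked. A behavior sketch mirroring the specs above, where dockerConfigService is an instance of the service and the calls run inside an async context:

// Expected validate() behavior, per the specs above:
await dockerConfigService.validate({ updateCheckCronSchedule: '0 6 * * *' });
// -> resolves to { updateCheckCronSchedule: '0 6 * * *' }

await dockerConfigService.validate({ updateCheckCronSchedule: 'invalid-cron' });
// -> rejects with AppError('Cron expression not supported: invalid-cron')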
@@ -0,0 +1,51 @@
import { Logger } from '@nestjs/common';
import { Mutation, Parent, ResolveField, Resolver } from '@nestjs/graphql';

import { Resource } from '@unraid/shared/graphql.model.js';
import { AuthAction, UsePermissions } from '@unraid/shared/use-permissions.directive.js';

import { AppError } from '@app/core/errors/app-error.js';
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';
import { DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';

@Resolver(() => DockerContainer)
export class DockerContainerResolver {
    private readonly logger = new Logger(DockerContainerResolver.name);
    constructor(private readonly dockerManifestService: DockerManifestService) {}

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.READ_ANY,
        resource: Resource.DOCKER,
    })
    @ResolveField(() => Boolean, { nullable: true })
    public async isUpdateAvailable(@Parent() container: DockerContainer) {
        try {
            return await this.dockerManifestService.isUpdateAvailableCached(container.image);
        } catch (error) {
            this.logger.error(error);
            throw new AppError('Failed to read cached update status. See graphql-api.log for details.');
        }
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.READ_ANY,
        resource: Resource.DOCKER,
    })
    @ResolveField(() => Boolean, { nullable: true })
    public async isRebuildReady(@Parent() container: DockerContainer) {
        return this.dockerManifestService.isRebuildReady(container.hostConfig?.networkMode);
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,
    })
    @Mutation(() => Boolean)
    public async refreshDockerDigests() {
        return this.dockerManifestService.refreshDigests();
    }
}
@@ -0,0 +1,62 @@
import { Injectable } from '@nestjs/common';

import { AsyncMutex } from '@unraid/shared/util/processing.js';

import { docker } from '@app/core/utils/index.js';
import {
    CachedStatusEntry,
    DockerPhpService,
} from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';

@Injectable()
export class DockerManifestService {
    constructor(private readonly dockerPhpService: DockerPhpService) {}

    private readonly refreshDigestsMutex = new AsyncMutex(() => {
        return this.dockerPhpService.refreshDigestsViaPhp();
    });

    /**
     * Recomputes local/remote docker container digests and writes them to /var/lib/docker/unraid-update-status.json
     * @param mutex - Optional mutex to use for the operation. If not provided, a default mutex will be used.
     * @param dockerUpdatePath - Optional path to the DockerUpdate.php file. If not provided, the default path will be used.
     * @returns True if the digests were refreshed, false if the operation failed
     */
    async refreshDigests(mutex = this.refreshDigestsMutex, dockerUpdatePath?: string) {
        return mutex.do(() => {
            return this.dockerPhpService.refreshDigestsViaPhp(dockerUpdatePath);
        });
    }

    /**
     * Checks if an update is available for a given container image.
     * @param imageRef - The image reference to check, e.g. "unraid/baseimage:latest". If no tag is provided, "latest" is assumed, following the webgui's implementation.
     * @param cacheData read from /var/lib/docker/unraid-update-status.json by default
     * @returns True if an update is available, false if not, or null if the status is unknown
     */
    async isUpdateAvailableCached(imageRef: string, cacheData?: Record<string, CachedStatusEntry>) {
        let taggedRef = imageRef;
        if (!taggedRef.includes(':')) taggedRef += ':latest';

        cacheData ??= await this.dockerPhpService.readCachedUpdateStatus();
        const containerData = cacheData[taggedRef];
        if (!containerData) return null;
        return containerData.status?.toLowerCase() === 'true';
    }

    /**
     * Checks if a container is rebuild ready.
     * @param networkMode - The network mode of the container, e.g. "container:unraid/baseimage:latest".
     * @returns True if the container is rebuild ready, false if not
     */
    async isRebuildReady(networkMode?: string) {
        if (!networkMode || !networkMode.startsWith('container:')) return false;
        const target = networkMode.slice('container:'.length);
        try {
            await docker.getContainer(target).inspect();
            return false;
        } catch {
            return true; // unresolved target -> ':???' equivalent
        }
    }
}
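The cache lookup above normalizes untagged refs to ':latest' before indexing. A worked example against an assumed cache, with digests abbreviated and svc standing in for an instance of the service above:

// Assumed cache entries keyed by tagged image ref:
const cache: Record<string, CachedStatusEntry> = {
    'nginx:latest': { local: 'sha256:aaa', remote: 'sha256:bbb', status: 'true' },
    'redis:7': { local: 'sha256:ccc', remote: 'sha256:ccc', status: 'false' },
};

await svc.isUpdateAvailableCached('nginx', cache); // 'nginx' -> 'nginx:latest' -> true
await svc.isUpdateAvailableCached('redis:7', cache); // false
await svc.isUpdateAvailableCached('ghost', cache); // not cached -> null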
api/src/unraid-api/graph/resolvers/docker/docker-php.service.ts (new file, 130 lines)

@@ -0,0 +1,130 @@
import { Injectable, Logger } from '@nestjs/common';
import { readFile } from 'fs/promises';

import { z } from 'zod';

import { phpLoader } from '@app/core/utils/plugins/php-loader.js';
import {
    ExplicitStatusItem,
    UpdateStatus,
} from '@app/unraid-api/graph/resolvers/docker/docker-update-status.model.js';
import { parseDockerPushCalls } from '@app/unraid-api/graph/resolvers/docker/utils/docker-push-parser.js';

type StatusItem = { name: string; updateStatus: 0 | 1 | 2 | 3 };

/**
 * These types reflect the structure of the /var/lib/docker/unraid-update-status.json file,
 * which is not controlled by the Unraid API.
 */
const CachedStatusEntrySchema = z.object({
    /** sha256 digest - "sha256:..." */
    local: z.string(),
    /** sha256 digest - "sha256:..." */
    remote: z.string(),
    /** whether update is available (true), not available (false), or unknown (null) */
    status: z.enum(['true', 'false']).nullable(),
});
const CachedStatusSchema = z.record(z.string(), CachedStatusEntrySchema);
export type CachedStatusEntry = z.infer<typeof CachedStatusEntrySchema>;

@Injectable()
export class DockerPhpService {
    private readonly logger = new Logger(DockerPhpService.name);
    constructor() {}

    /**
     * Reads JSON from a file containing cached update status.
     * If the file does not exist, an empty object is returned.
     * @param cacheFile
     * @returns
     */
    async readCachedUpdateStatus(
        cacheFile = '/var/lib/docker/unraid-update-status.json'
    ): Promise<Record<string, CachedStatusEntry>> {
        try {
            const cache = await readFile(cacheFile, 'utf8');
            const cacheData = JSON.parse(cache);
            const { success, data } = CachedStatusSchema.safeParse(cacheData);
            if (success) return data;
            this.logger.warn(cacheData, 'Invalid cached update status');
            return {};
        } catch (error) {
            this.logger.warn(error, 'Failed to read cached update status');
            return {};
        }
    }

    /**----------------------
     * Refresh Container Digests
     *------------------------**/

    /**
     * Recomputes local/remote digests by triggering `DockerTemplates->getAllInfo(true)` via DockerUpdate.php
     * @param dockerUpdatePath - Path to the DockerUpdate.php file
     * @returns True if the digests were refreshed, false if the file is not found or the operation failed
     */
    async refreshDigestsViaPhp(
        dockerUpdatePath = '/usr/local/emhttp/plugins/dynamix.docker.manager/include/DockerUpdate.php'
    ) {
        try {
            await phpLoader({
                file: dockerUpdatePath,
                method: 'GET',
            });
            return true;
        } catch {
            // ignore; offline may keep remote as 'undef'
            return false;
        }
    }

    /**----------------------
     * Parse Container Statuses
     *------------------------**/

    private parseStatusesFromDockerPush(js: string): ExplicitStatusItem[] {
        const matches = parseDockerPushCalls(js);
        return matches.map(({ name, updateStatus }) => ({
            name,
            updateStatus: this.updateStatusToString(updateStatus as StatusItem['updateStatus']),
        }));
    }

    private updateStatusToString(updateStatus: 0): UpdateStatus.UP_TO_DATE;
    private updateStatusToString(updateStatus: 1): UpdateStatus.UPDATE_AVAILABLE;
    private updateStatusToString(updateStatus: 2): UpdateStatus.REBUILD_READY;
    private updateStatusToString(updateStatus: 3): UpdateStatus.UNKNOWN;
    // prettier-ignore
    private updateStatusToString(updateStatus: StatusItem['updateStatus']): ExplicitStatusItem['updateStatus'];
    private updateStatusToString(
        updateStatus: StatusItem['updateStatus']
    ): ExplicitStatusItem['updateStatus'] {
        switch (updateStatus) {
            case 0:
                return UpdateStatus.UP_TO_DATE;
            case 1:
                return UpdateStatus.UPDATE_AVAILABLE;
            case 2:
                return UpdateStatus.REBUILD_READY;
            default:
                return UpdateStatus.UNKNOWN;
        }
    }

    /**
     * Gets the update statuses for all containers by triggering `DockerTemplates->getAllInfo(true)` via DockerContainers.php
     * @param dockerContainersPath - Path to the DockerContainers.php file
     * @returns The update statuses for all containers
     */
    async getContainerUpdateStatuses(
        dockerContainersPath = '/usr/local/emhttp/plugins/dynamix.docker.manager/include/DockerContainers.php'
    ): Promise<ExplicitStatusItem[]> {
        const stdout = await phpLoader({
            file: dockerContainersPath,
            method: 'GET',
        });
        const parts = stdout.split('\0'); // [html, "docker.push(...)", busyFlag]
        const js = parts[1] || '';
        return this.parseStatusesFromDockerPush(js);
    }
}
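For reference, the zod schema above accepts a cache file shaped like the following. Since the webgui owns this file, treat the example as an observed shape rather than a contract; image names and digests are illustrative only:

// Example /var/lib/docker/unraid-update-status.json content as the schema accepts it:
const exampleCache: Record<string, CachedStatusEntry> = {
    'lscr.io/linuxserver/plex:latest': {
        local: 'sha256:1111',
        remote: 'sha256:2222',
        status: 'true', // update available
    },
    'redis:7': {
        local: 'sha256:3333',
        remote: 'sha256:3333',
        status: null, // unknown, e.g. remote digest not yet resolved
    },
};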
@@ -0,0 +1,25 @@
import { Field, ObjectType, registerEnumType } from '@nestjs/graphql';

/**
 * Note that these values propagate down to API consumers, so be aware of breaking changes.
 */
export enum UpdateStatus {
    UP_TO_DATE = 'UP_TO_DATE',
    UPDATE_AVAILABLE = 'UPDATE_AVAILABLE',
    REBUILD_READY = 'REBUILD_READY',
    UNKNOWN = 'UNKNOWN',
}

registerEnumType(UpdateStatus, {
    name: 'UpdateStatus',
    description: 'Update status of a container.',
});

@ObjectType()
export class ExplicitStatusItem {
    @Field(() => String)
    name!: string;

    @Field(() => UpdateStatus)
    updateStatus!: UpdateStatus;
}
@@ -1,15 +1,16 @@
import { ConfigService } from '@nestjs/config';
import { Test, TestingModule } from '@nestjs/testing';

import { describe, expect, it, vi } from 'vitest';

import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerEventService } from '@app/unraid-api/graph/resolvers/docker/docker-event.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
import { DockerModule } from '@app/unraid-api/graph/resolvers/docker/docker.module.js';
import { DockerMutationsResolver } from '@app/unraid-api/graph/resolvers/docker/docker.mutations.resolver.js';
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';

describe('DockerModule', () => {
    it('should compile the module', async () => {

@@ -18,6 +19,8 @@ describe('DockerModule', () => {
        })
            .overrideProvider(DockerService)
            .useValue({ getDockerClient: vi.fn() })
            .overrideProvider(DockerOrganizerConfigService)
            .useValue({ getConfig: vi.fn() })
            .overrideProvider(DockerConfigService)
            .useValue({ getConfig: vi.fn() })
            .compile();

@@ -61,6 +64,7 @@ describe('DockerModule', () => {
                DockerResolver,
                { provide: DockerService, useValue: {} },
                { provide: DockerOrganizerService, useValue: {} },
                { provide: DockerPhpService, useValue: { getContainerUpdateStatuses: vi.fn() } },
            ],
        }).compile();
@@ -1,22 +1,36 @@
import { Module } from '@nestjs/common';

import { JobModule } from '@app/unraid-api/cron/job.module.js';
import { ContainerStatusJob } from '@app/unraid-api/graph/resolvers/docker/container-status.job.js';
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
import { DockerContainerResolver } from '@app/unraid-api/graph/resolvers/docker/docker-container.resolver.js';
import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';
import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
import { DockerMutationsResolver } from '@app/unraid-api/graph/resolvers/docker/docker.mutations.resolver.js';
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';

@Module({
    imports: [JobModule],
    providers: [
        // Services
        DockerService,
        DockerConfigService,
        DockerOrganizerConfigService,
        DockerOrganizerService,
        DockerManifestService,
        DockerPhpService,
        DockerConfigService,
        // DockerEventService,

        // Jobs
        ContainerStatusJob,

        // Resolvers
        DockerResolver,
        DockerMutationsResolver,
        DockerContainerResolver,
    ],
    exports: [DockerService],
})
@@ -3,10 +3,11 @@ import { Test } from '@nestjs/testing';

import { beforeEach, describe, expect, it, vi } from 'vitest';

import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
import { ContainerState, DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';

describe('DockerResolver', () => {
    let resolver: DockerResolver;

@@ -26,7 +27,13 @@ describe('DockerResolver', () => {
                {
                    provide: DockerOrganizerService,
                    useValue: {
                        getResolvedOrganizer: vi.fn(),
                        resolveOrganizer: vi.fn(),
                    },
                },
                {
                    provide: DockerPhpService,
                    useValue: {
                        getContainerUpdateStatuses: vi.fn(),
                    },
                },
            ],
@@ -3,21 +3,25 @@ import { Args, Mutation, Query, ResolveField, Resolver } from '@nestjs/graphql';
import { AuthAction, Resource } from '@unraid/shared/graphql.model.js';
import { UsePermissions } from '@unraid/shared/use-permissions.directive.js';

import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
import { ExplicitStatusItem } from '@app/unraid-api/graph/resolvers/docker/docker-update-status.model.js';
import {
    Docker,
    DockerContainer,
    DockerNetwork,
} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
import { DEFAULT_ORGANIZER_ROOT_ID } from '@app/unraid-api/organizer/organizer.js';
import { OrganizerV1, ResolvedOrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
import { ResolvedOrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';

@Resolver(() => Docker)
export class DockerResolver {
    constructor(
        private readonly dockerService: DockerService,
        private readonly dockerOrganizerService: DockerOrganizerService
        private readonly dockerOrganizerService: DockerOrganizerService,
        private readonly dockerPhpService: DockerPhpService
    ) {}

    @UsePermissions({

@@ -53,6 +57,7 @@ export class DockerResolver {
        return this.dockerService.getNetworks({ skipCache });
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.READ_ANY,
        resource: Resource.DOCKER,

@@ -62,6 +67,7 @@ export class DockerResolver {
        return this.dockerOrganizerService.resolveOrganizer();
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,

@@ -80,6 +86,7 @@ export class DockerResolver {
        return this.dockerOrganizerService.resolveOrganizer(organizer);
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,

@@ -96,6 +103,7 @@ export class DockerResolver {
        return this.dockerOrganizerService.resolveOrganizer(organizer);
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,

@@ -108,6 +116,7 @@ export class DockerResolver {
        return this.dockerOrganizerService.resolveOrganizer(organizer);
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,

@@ -123,4 +132,14 @@ export class DockerResolver {
        });
        return this.dockerOrganizerService.resolveOrganizer(organizer);
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.READ_ANY,
        resource: Resource.DOCKER,
    })
    @ResolveField(() => [ExplicitStatusItem])
    public async containerUpdateStatuses() {
        return this.dockerPhpService.getContainerUpdateStatuses();
    }
}
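Taken together with the container resolver earlier in this diff, a consumer behind the feature flag could fetch both per-container and batch update state. A hypothetical query sketch follows; only isUpdateAvailable, isRebuildReady, and containerUpdateStatuses are introduced by this change, and the surrounding field names are assumptions:

// Hypothetical query; requires ENABLE_NEXT_DOCKER_RELEASE and DOCKER read permission.
const CONTAINER_STATUS_QUERY = /* GraphQL */ `
    query ContainerUpdateStatus {
        docker {
            containers {
                isUpdateAvailable
                isRebuildReady
            }
            containerUpdateStatuses {
                name
                updateStatus
            }
        }
    }
`;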
@@ -0,0 +1,64 @@
import { Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';

import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';

import { FeatureFlags } from '@app/consts.js';
import { AppError } from '@app/core/errors/app-error.js';
import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';
import {
    DEFAULT_ORGANIZER_ROOT_ID,
    DEFAULT_ORGANIZER_VIEW_ID,
} from '@app/unraid-api/organizer/organizer.js';
import { OrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
import { validateOrganizerIntegrity } from '@app/unraid-api/organizer/organizer.validation.js';

@Injectable()
export class DockerOrganizerConfigService extends ConfigFilePersister<OrganizerV1> {
    constructor(configService: ConfigService) {
        super(configService);
    }

    enabled(): boolean {
        return FeatureFlags.ENABLE_NEXT_DOCKER_RELEASE;
    }

    configKey(): string {
        return 'dockerOrganizer';
    }

    fileName(): string {
        return 'docker.organizer.json';
    }

    defaultConfig(): OrganizerV1 {
        return {
            version: 1,
            resources: {},
            views: {
                default: {
                    id: DEFAULT_ORGANIZER_VIEW_ID,
                    name: 'Default',
                    root: DEFAULT_ORGANIZER_ROOT_ID,
                    entries: {
                        root: {
                            type: 'folder',
                            id: DEFAULT_ORGANIZER_ROOT_ID,
                            name: 'Root',
                            children: [],
                        },
                    },
                },
            },
        };
    }

    async validate(config: object): Promise<OrganizerV1> {
        const organizer = await validateObject(OrganizerV1, config);
        const { isValid, errors } = await validateOrganizerIntegrity(organizer);
        if (!isValid) {
            throw new AppError(`Docker organizer validation failed: ${JSON.stringify(errors, null, 2)}`);
        }
        return organizer;
    }
}
@@ -2,17 +2,17 @@ import { Test } from '@nestjs/testing';

import { beforeEach, describe, expect, it, vi } from 'vitest';

import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import {
    containerToResource,
    DockerOrganizerService,
} from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
import {
    ContainerPortType,
    ContainerState,
    DockerContainer,
} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
import {
    containerToResource,
    DockerOrganizerService,
} from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
import { OrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';

describe('containerToResource', () => {

@@ -138,7 +138,7 @@ describe('containerToResource', () => {

describe('DockerOrganizerService', () => {
    let service: DockerOrganizerService;
    let configService: DockerConfigService;
    let configService: DockerOrganizerConfigService;
    let dockerService: DockerService;

    const mockOrganizer: OrganizerV1 = {

@@ -178,7 +178,7 @@ describe('DockerOrganizerService', () => {
            providers: [
                DockerOrganizerService,
                {
                    provide: DockerConfigService,
                    provide: DockerOrganizerConfigService,
                    useValue: {
                        getConfig: vi.fn().mockImplementation(() => structuredClone(mockOrganizer)),
                        validate: vi.fn().mockImplementation((config) => Promise.resolve(config)),

@@ -220,7 +220,7 @@ describe('DockerOrganizerService', () => {
        }).compile();

        service = moduleRef.get<DockerOrganizerService>(DockerOrganizerService);
        configService = moduleRef.get<DockerConfigService>(DockerConfigService);
        configService = moduleRef.get<DockerOrganizerConfigService>(DockerOrganizerConfigService);
        dockerService = moduleRef.get<DockerService>(DockerService);
    });
@@ -3,9 +3,9 @@ import { Injectable, Logger } from '@nestjs/common';
import type { ContainerListOptions } from 'dockerode';

import { AppError } from '@app/core/errors/app-error.js';
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
import {
    addMissingResourcesToView,
    createFolderInView,

@@ -47,7 +47,7 @@ export function containerListToResourcesObject(containers: DockerContainer[]): O
export class DockerOrganizerService {
    private readonly logger = new Logger(DockerOrganizerService.name);
    constructor(
        private readonly dockerConfigService: DockerConfigService,
        private readonly dockerConfigService: DockerOrganizerConfigService,
        private readonly dockerService: DockerService
    ) {}
@@ -0,0 +1,124 @@
import { describe, expect, it } from 'vitest';

import type { DockerPushMatch } from '@app/unraid-api/graph/resolvers/docker/utils/docker-push-parser.js';
import { parseDockerPushCalls } from '@app/unraid-api/graph/resolvers/docker/utils/docker-push-parser.js';

describe('parseDockerPushCalls', () => {
    it('should extract name and update status from valid docker.push call', () => {
        const jsCode = "docker.push({name:'nginx',update:1});";
        const result = parseDockerPushCalls(jsCode);

        expect(result).toEqual([{ name: 'nginx', updateStatus: 1 }]);
    });

    it('should handle multiple docker.push calls in same string', () => {
        const jsCode = `
            docker.push({name:'nginx',update:1});
            docker.push({name:'mysql',update:0});
            docker.push({name:'redis',update:2});
        `;
        const result = parseDockerPushCalls(jsCode);

        expect(result).toEqual([
            { name: 'nginx', updateStatus: 1 },
            { name: 'mysql', updateStatus: 0 },
            { name: 'redis', updateStatus: 2 },
        ]);
    });

    it('should handle docker.push calls with additional properties', () => {
        const jsCode =
            "docker.push({id:'123',name:'nginx',version:'latest',update:3,status:'running'});";
        const result = parseDockerPushCalls(jsCode);

        expect(result).toEqual([{ name: 'nginx', updateStatus: 3 }]);
    });

    it('should handle different property order', () => {
        const jsCode = "docker.push({update:2,name:'postgres',id:'456'});";
        const result = parseDockerPushCalls(jsCode);

        expect(result).toEqual([{ name: 'postgres', updateStatus: 2 }]);
    });

    it('should handle container names with special characters', () => {
        const jsCode = "docker.push({name:'my-app_v2.0',update:1});";
        const result = parseDockerPushCalls(jsCode);

        expect(result).toEqual([{ name: 'my-app_v2.0', updateStatus: 1 }]);
    });

    it('should handle whitespace variations', () => {
        const jsCode = "docker.push({ name: 'nginx' , update: 1 });";
        const result = parseDockerPushCalls(jsCode);

        expect(result).toEqual([{ name: 'nginx', updateStatus: 1 }]);
    });

    it('should return empty array for empty string', () => {
        const result = parseDockerPushCalls('');
        expect(result).toEqual([]);
    });

    it('should return empty array when no docker.push calls found', () => {
        const jsCode = "console.log('no docker calls here');";
        const result = parseDockerPushCalls(jsCode);
        expect(result).toEqual([]);
    });

    it('should ignore malformed docker.push calls', () => {
        const jsCode = `
            docker.push({name:'valid',update:1});
            docker.push({name:'missing-update'});
            docker.push({update:2});
            docker.push({name:'another-valid',update:0});
        `;
        const result = parseDockerPushCalls(jsCode);

        expect(result).toEqual([
            { name: 'valid', updateStatus: 1 },
            { name: 'another-valid', updateStatus: 0 },
        ]);
    });

    it('should handle all valid update status values', () => {
        const jsCode = `
            docker.push({name:'container0',update:0});
            docker.push({name:'container1',update:1});
            docker.push({name:'container2',update:2});
            docker.push({name:'container3',update:3});
        `;
        const result = parseDockerPushCalls(jsCode);

        expect(result).toEqual([
            { name: 'container0', updateStatus: 0 },
            { name: 'container1', updateStatus: 1 },
            { name: 'container2', updateStatus: 2 },
            { name: 'container3', updateStatus: 3 },
        ]);
    });

    it('should handle real-world example with HTML and multiple containers', () => {
        const jsCode = `
            <div>some html</div>
            docker.push({id:'abc123',name:'plex',version:'1.32',update:1,autostart:true});
            docker.push({id:'def456',name:'nextcloud',version:'latest',update:0,ports:'80:8080'});
            <script>more content</script>
            docker.push({id:'ghi789',name:'homeassistant',update:2});
        `;
        const result = parseDockerPushCalls(jsCode);

        expect(result).toEqual([
            { name: 'plex', updateStatus: 1 },
            { name: 'nextcloud', updateStatus: 0 },
            { name: 'homeassistant', updateStatus: 2 },
        ]);
    });

    it('should handle nested braces in other properties', () => {
        const jsCode = 'docker.push({config:\'{"nested":"value"}\',name:\'test\',update:1});';
        const result = parseDockerPushCalls(jsCode);

        expect(result).toEqual([{ name: 'test', updateStatus: 1 }]);
    });
});
@@ -0,0 +1,24 @@
export interface DockerPushMatch {
    name: string;
    updateStatus: number;
}

export function parseDockerPushCalls(jsCode: string): DockerPushMatch[] {
    const dockerPushRegex = /docker\.push\(\{[^}]*(?:(?:[^{}]|{[^}]*})*)\}\);/g;
    const matches: DockerPushMatch[] = [];

    for (const match of jsCode.matchAll(dockerPushRegex)) {
        const objectContent = match[0];

        const nameMatch = objectContent.match(/name\s*:\s*'([^']+)'/);
        const updateMatch = objectContent.match(/update\s*:\s*(\d)/);

        if (nameMatch && updateMatch) {
            const name = nameMatch[1];
            const updateStatus = Number(updateMatch[1]);
            matches.push({ name, updateStatus });
        }
    }

    return matches;
}
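The parser pairs one broad matcher for whole `docker.push({...});` calls with two narrow property regexes, so extra properties and nested JSON blobs (as in the `config` test above) do not break extraction. A minimal usage sketch follows; the import path and the page snippet are illustrative assumptions, and the numeric update codes (0 through 3 in the tests) are passed through without interpretation since this diff does not define their meanings:

    import { parseDockerPushCalls } from './parse-docker-push.js'; // assumed path

    // Markup in the style the "real-world" test uses: docker.push(...) calls
    // embedded alongside HTML.
    const page = `
        <div>dashboard</div>
        docker.push({id:'abc',name:'plex',version:'1.32',update:1});
        docker.push({id:'def',name:'nginx',update:0});
    `;

    for (const { name, updateStatus } of parseDockerPushCalls(page)) {
        console.log(`${name} -> update status ${updateStatus}`);
    }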
@@ -27,6 +27,16 @@ export class CpuLoad {
        description: 'The percentage of time the CPU spent servicing hardware interrupts.',
    })
    percentIrq!: number;

    @Field(() => Float, {
        description: 'The percentage of time the CPU spent running virtual machines (guest).',
    })
    percentGuest!: number;

    @Field(() => Float, {
        description: 'The percentage of CPU time stolen by the hypervisor.',
    })
    percentSteal!: number;
}

@ObjectType({ implements: () => Node })
api/src/unraid-api/graph/resolvers/info/cpu/cpu.service.spec.ts (new file, 246 lines)
@@ -0,0 +1,246 @@
import { beforeEach, describe, expect, it, vi } from 'vitest';

import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service.js';

vi.mock('systeminformation', () => ({
    cpu: vi.fn().mockResolvedValue({
        manufacturer: 'Intel',
        brand: 'Core i7-9700K',
        vendor: 'Intel',
        family: '6',
        model: '158',
        stepping: '12',
        revision: '',
        voltage: '1.2V',
        speed: 3.6,
        speedMin: 800,
        speedMax: 4900,
        cores: 16,
        physicalCores: 8,
        processors: 1,
        socket: 'LGA1151',
        cache: {
            l1d: 32768,
            l1i: 32768,
            l2: 262144,
            l3: 12582912,
        },
    }),
    cpuFlags: vi.fn().mockResolvedValue('fpu vme de pse tsc msr pae mce cx8'),
    currentLoad: vi.fn().mockResolvedValue({
        avgLoad: 2.5,
        currentLoad: 25.5,
        currentLoadUser: 15.0,
        currentLoadSystem: 8.0,
        currentLoadNice: 0.5,
        currentLoadIdle: 74.5,
        currentLoadIrq: 1.0,
        currentLoadSteal: 0.2,
        currentLoadGuest: 0.3,
        rawCurrentLoad: 25500,
        rawCurrentLoadUser: 15000,
        rawCurrentLoadSystem: 8000,
        rawCurrentLoadNice: 500,
        rawCurrentLoadIdle: 74500,
        rawCurrentLoadIrq: 1000,
        rawCurrentLoadSteal: 200,
        rawCurrentLoadGuest: 300,
        cpus: [
            {
                load: 30.0,
                loadUser: 20.0,
                loadSystem: 10.0,
                loadNice: 0,
                loadIdle: 70.0,
                loadIrq: 0,
                loadSteal: 0,
                loadGuest: 0,
                rawLoad: 30000,
                rawLoadUser: 20000,
                rawLoadSystem: 10000,
                rawLoadNice: 0,
                rawLoadIdle: 70000,
                rawLoadIrq: 0,
                rawLoadSteal: 0,
                rawLoadGuest: 0,
            },
            {
                load: 21.0,
                loadUser: 15.0,
                loadSystem: 6.0,
                loadNice: 0,
                loadIdle: 79.0,
                loadIrq: 0,
                loadSteal: 0,
                loadGuest: 0,
                rawLoad: 21000,
                rawLoadUser: 15000,
                rawLoadSystem: 6000,
                rawLoadNice: 0,
                rawLoadIdle: 79000,
                rawLoadIrq: 0,
                rawLoadSteal: 0,
                rawLoadGuest: 0,
            },
        ],
    }),
}));

describe('CpuService', () => {
    let service: CpuService;

    beforeEach(() => {
        service = new CpuService();
    });

    describe('generateCpu', () => {
        it('should return CPU information with correct structure', async () => {
            const result = await service.generateCpu();

            expect(result).toEqual({
                id: 'info/cpu',
                manufacturer: 'Intel',
                brand: 'Core i7-9700K',
                vendor: 'Intel',
                family: '6',
                model: '158',
                stepping: 12,
                revision: '',
                voltage: '1.2V',
                speed: 3.6,
                speedmin: 800,
                speedmax: 4900,
                cores: 8,
                threads: 16,
                processors: 1,
                socket: 'LGA1151',
                cache: {
                    l1d: 32768,
                    l1i: 32768,
                    l2: 262144,
                    l3: 12582912,
                },
                flags: ['fpu', 'vme', 'de', 'pse', 'tsc', 'msr', 'pae', 'mce', 'cx8'],
            });
        });

        it('should handle missing speed values', async () => {
            const { cpu } = await import('systeminformation');
            vi.mocked(cpu).mockResolvedValueOnce({
                manufacturer: 'Intel',
                brand: 'Core i7-9700K',
                vendor: 'Intel',
                family: '6',
                model: '158',
                stepping: '12',
                revision: '',
                voltage: '1.2V',
                speed: 3.6,
                cores: 16,
                physicalCores: 8,
                processors: 1,
                socket: 'LGA1151',
                cache: { l1d: 32768, l1i: 32768, l2: 262144, l3: 12582912 },
            } as any);

            const result = await service.generateCpu();

            expect(result.speedmin).toBe(-1);
            expect(result.speedmax).toBe(-1);
        });

        it('should handle cpuFlags error gracefully', async () => {
            const { cpuFlags } = await import('systeminformation');
            vi.mocked(cpuFlags).mockRejectedValueOnce(new Error('flags error'));

            const result = await service.generateCpu();

            expect(result.flags).toEqual([]);
        });
    });

    describe('generateCpuLoad', () => {
        it('should return CPU utilization with all load metrics', async () => {
            const result = await service.generateCpuLoad();

            expect(result).toEqual({
                id: 'info/cpu-load',
                percentTotal: 25.5,
                cpus: [
                    {
                        percentTotal: 30.0,
                        percentUser: 20.0,
                        percentSystem: 10.0,
                        percentNice: 0,
                        percentIdle: 70.0,
                        percentIrq: 0,
                        percentGuest: 0,
                        percentSteal: 0,
                    },
                    {
                        percentTotal: 21.0,
                        percentUser: 15.0,
                        percentSystem: 6.0,
                        percentNice: 0,
                        percentIdle: 79.0,
                        percentIrq: 0,
                        percentGuest: 0,
                        percentSteal: 0,
                    },
                ],
            });
        });

        it('should include guest and steal metrics when present', async () => {
            const { currentLoad } = await import('systeminformation');
            vi.mocked(currentLoad).mockResolvedValueOnce({
                avgLoad: 2.5,
                currentLoad: 25.5,
                currentLoadUser: 15.0,
                currentLoadSystem: 8.0,
                currentLoadNice: 0.5,
                currentLoadIdle: 74.5,
                currentLoadIrq: 1.0,
                currentLoadSteal: 0.2,
                currentLoadGuest: 0.3,
                rawCurrentLoad: 25500,
                rawCurrentLoadUser: 15000,
                rawCurrentLoadSystem: 8000,
                rawCurrentLoadNice: 500,
                rawCurrentLoadIdle: 74500,
                rawCurrentLoadIrq: 1000,
                rawCurrentLoadSteal: 200,
                rawCurrentLoadGuest: 300,
                cpus: [
                    {
                        load: 30.0,
                        loadUser: 20.0,
                        loadSystem: 10.0,
                        loadNice: 0,
                        loadIdle: 70.0,
                        loadIrq: 0,
                        loadGuest: 2.5,
                        loadSteal: 1.2,
                        rawLoad: 30000,
                        rawLoadUser: 20000,
                        rawLoadSystem: 10000,
                        rawLoadNice: 0,
                        rawLoadIdle: 70000,
                        rawLoadIrq: 0,
                        rawLoadGuest: 2500,
                        rawLoadSteal: 1200,
                    },
                ],
            });

            const result = await service.generateCpuLoad();

            expect(result.cpus[0]).toEqual(
                expect.objectContaining({
                    percentGuest: 2.5,
                    percentSteal: 1.2,
                })
            );
        });
    });
});
@@ -37,6 +37,8 @@ export class CpuService {
            percentNice: cpu.loadNice,
            percentIdle: cpu.loadIdle,
            percentIrq: cpu.loadIrq,
            percentGuest: cpu.loadGuest || 0,
            percentSteal: cpu.loadSteal || 0,
        })),
    };
}
@@ -0,0 +1,213 @@
import { Test, TestingModule } from '@nestjs/testing';

import { beforeEach, describe, expect, it, vi } from 'vitest';

import {
    LogWatcherManager,
    WatcherState,
} from '@app/unraid-api/graph/resolvers/logs/log-watcher-manager.service.js';

describe('LogWatcherManager', () => {
    let manager: LogWatcherManager;
    let mockWatcher: any;

    beforeEach(async () => {
        const module: TestingModule = await Test.createTestingModule({
            providers: [LogWatcherManager],
        }).compile();

        manager = module.get<LogWatcherManager>(LogWatcherManager);

        mockWatcher = {
            close: vi.fn(),
            on: vi.fn(),
        };
    });

    describe('state management', () => {
        it('should set watcher as initializing', () => {
            manager.setInitializing('test-key');
            const entry = manager.getEntry('test-key');
            expect(entry).toBeDefined();
            expect(entry?.state).toBe(WatcherState.INITIALIZING);
        });

        it('should set watcher as active with position', () => {
            manager.setActive('test-key', mockWatcher as any, 1000);
            const entry = manager.getEntry('test-key');
            expect(entry).toBeDefined();
            expect(entry?.state).toBe(WatcherState.ACTIVE);
            if (manager.isActive(entry)) {
                expect(entry.watcher).toBe(mockWatcher);
                expect(entry.position).toBe(1000);
            }
        });

        it('should set watcher as stopping', () => {
            manager.setStopping('test-key');
            const entry = manager.getEntry('test-key');
            expect(entry).toBeDefined();
            expect(entry?.state).toBe(WatcherState.STOPPING);
        });
    });

    describe('isWatchingOrInitializing', () => {
        it('should return true for initializing watcher', () => {
            manager.setInitializing('test-key');
            expect(manager.isWatchingOrInitializing('test-key')).toBe(true);
        });

        it('should return true for active watcher', () => {
            manager.setActive('test-key', mockWatcher as any, 0);
            expect(manager.isWatchingOrInitializing('test-key')).toBe(true);
        });

        it('should return false for stopping watcher', () => {
            manager.setStopping('test-key');
            expect(manager.isWatchingOrInitializing('test-key')).toBe(false);
        });

        it('should return false for non-existent watcher', () => {
            expect(manager.isWatchingOrInitializing('test-key')).toBe(false);
        });
    });

    describe('handlePostInitialization', () => {
        it('should activate watcher when not stopped', () => {
            manager.setInitializing('test-key');
            const result = manager.handlePostInitialization('test-key', mockWatcher as any, 500);

            expect(result).toBe(true);
            expect(mockWatcher.close).not.toHaveBeenCalled();

            const entry = manager.getEntry('test-key');
            expect(entry?.state).toBe(WatcherState.ACTIVE);
            if (manager.isActive(entry)) {
                expect(entry.position).toBe(500);
            }
        });

        it('should cleanup watcher when marked as stopping', () => {
            manager.setStopping('test-key');
            const result = manager.handlePostInitialization('test-key', mockWatcher as any, 500);

            expect(result).toBe(false);
            expect(mockWatcher.close).toHaveBeenCalled();
            expect(manager.getEntry('test-key')).toBeUndefined();
        });

        it('should cleanup watcher when entry is missing', () => {
            const result = manager.handlePostInitialization('test-key', mockWatcher as any, 500);

            expect(result).toBe(false);
            expect(mockWatcher.close).toHaveBeenCalled();
            expect(manager.getEntry('test-key')).toBeUndefined();
        });
    });

    describe('stopWatcher', () => {
        it('should mark initializing watcher as stopping', () => {
            manager.setInitializing('test-key');
            manager.stopWatcher('test-key');

            const entry = manager.getEntry('test-key');
            expect(entry?.state).toBe(WatcherState.STOPPING);
        });

        it('should close and remove active watcher', () => {
            manager.setActive('test-key', mockWatcher as any, 0);
            manager.stopWatcher('test-key');

            expect(mockWatcher.close).toHaveBeenCalled();
            expect(manager.getEntry('test-key')).toBeUndefined();
        });

        it('should do nothing for non-existent watcher', () => {
            manager.stopWatcher('test-key');
            expect(mockWatcher.close).not.toHaveBeenCalled();
        });
    });

    describe('position management', () => {
        it('should update position for active watcher', () => {
            manager.setActive('test-key', mockWatcher as any, 100);
            manager.updatePosition('test-key', 200);

            const position = manager.getPosition('test-key');
            expect(position).toBe(200);
        });

        it('should not update position for non-active watcher', () => {
            manager.setInitializing('test-key');
            manager.updatePosition('test-key', 200);

            const position = manager.getPosition('test-key');
            expect(position).toBeUndefined();
        });

        it('should get position for active watcher', () => {
            manager.setActive('test-key', mockWatcher as any, 300);
            expect(manager.getPosition('test-key')).toBe(300);
        });

        it('should return undefined for non-active watcher', () => {
            manager.setStopping('test-key');
            expect(manager.getPosition('test-key')).toBeUndefined();
        });
    });

    describe('stopAllWatchers', () => {
        it('should close all active watchers and clear map', () => {
            const mockWatcher1 = { close: vi.fn() };
            const mockWatcher2 = { close: vi.fn() };
            const mockWatcher3 = { close: vi.fn() };

            manager.setActive('key1', mockWatcher1 as any, 0);
            manager.setInitializing('key2');
            manager.setActive('key3', mockWatcher2 as any, 0);
            manager.setStopping('key4');
            manager.setActive('key5', mockWatcher3 as any, 0);

            manager.stopAllWatchers();

            expect(mockWatcher1.close).toHaveBeenCalled();
            expect(mockWatcher2.close).toHaveBeenCalled();
            expect(mockWatcher3.close).toHaveBeenCalled();

            expect(manager.getEntry('key1')).toBeUndefined();
            expect(manager.getEntry('key2')).toBeUndefined();
            expect(manager.getEntry('key3')).toBeUndefined();
            expect(manager.getEntry('key4')).toBeUndefined();
            expect(manager.getEntry('key5')).toBeUndefined();
        });
    });

    describe('in-flight processing', () => {
        it('should prevent concurrent processing', () => {
            manager.setActive('test-key', mockWatcher as any, 0);

            // First call should succeed
            expect(manager.startProcessing('test-key')).toBe(true);

            // Second call should fail (already in flight)
            expect(manager.startProcessing('test-key')).toBe(false);

            // After finishing, should be able to start again
            manager.finishProcessing('test-key');
            expect(manager.startProcessing('test-key')).toBe(true);
        });

        it('should not start processing for non-active watcher', () => {
            manager.setInitializing('test-key');
            expect(manager.startProcessing('test-key')).toBe(false);

            manager.setStopping('test-key');
            expect(manager.startProcessing('test-key')).toBe(false);
        });

        it('should handle finish processing for non-existent watcher', () => {
            // Should not throw
            expect(() => manager.finishProcessing('non-existent')).not.toThrow();
        });
    });
});
@@ -0,0 +1,183 @@
import { Injectable, Logger } from '@nestjs/common';

import * as chokidar from 'chokidar';

export enum WatcherState {
    INITIALIZING = 'initializing',
    ACTIVE = 'active',
    STOPPING = 'stopping',
}

export type WatcherEntry =
    | { state: WatcherState.INITIALIZING }
    | { state: WatcherState.ACTIVE; watcher: chokidar.FSWatcher; position: number; inFlight: boolean }
    | { state: WatcherState.STOPPING };

/**
 * Service responsible for managing log file watchers and their lifecycle.
 * Handles race conditions during watcher initialization and cleanup.
 */
@Injectable()
export class LogWatcherManager {
    private readonly logger = new Logger(LogWatcherManager.name);
    private readonly watchers = new Map<string, WatcherEntry>();

    /**
     * Set a watcher as initializing
     */
    setInitializing(key: string): void {
        this.watchers.set(key, { state: WatcherState.INITIALIZING });
    }

    /**
     * Set a watcher as active with its FSWatcher and position
     */
    setActive(key: string, watcher: chokidar.FSWatcher, position: number): void {
        this.watchers.set(key, { state: WatcherState.ACTIVE, watcher, position, inFlight: false });
    }

    /**
     * Mark a watcher as stopping (used during initialization race conditions)
     */
    setStopping(key: string): void {
        this.watchers.set(key, { state: WatcherState.STOPPING });
    }

    /**
     * Get a watcher entry by key
     */
    getEntry(key: string): WatcherEntry | undefined {
        return this.watchers.get(key);
    }

    /**
     * Remove a watcher entry
     */
    removeEntry(key: string): void {
        this.watchers.delete(key);
    }

    /**
     * Check if a watcher is active and return typed entry
     */
    isActive(entry: WatcherEntry | undefined): entry is {
        state: WatcherState.ACTIVE;
        watcher: chokidar.FSWatcher;
        position: number;
        inFlight: boolean;
    } {
        return entry?.state === WatcherState.ACTIVE;
    }

    /**
     * Check if a watcher exists and is either initializing or active
     */
    isWatchingOrInitializing(key: string): boolean {
        const entry = this.getEntry(key);
        return (
            entry !== undefined &&
            (entry.state === WatcherState.ACTIVE || entry.state === WatcherState.INITIALIZING)
        );
    }

    /**
     * Handle cleanup after initialization completes.
     * Returns true if the watcher should continue, false if it should be cleaned up.
     */
    handlePostInitialization(key: string, watcher: chokidar.FSWatcher, position: number): boolean {
        const currentEntry = this.getEntry(key);

        if (!currentEntry || currentEntry.state === WatcherState.STOPPING) {
            // We were stopped during initialization, clean up immediately
            this.logger.debug(`Watcher for ${key} was stopped during initialization, cleaning up`);
            watcher.close();
            this.removeEntry(key);
            return false;
        }

        // Store the active watcher and position
        this.setActive(key, watcher, position);
        return true;
    }

    /**
     * Stop a watcher, handling all possible states
     */
    stopWatcher(key: string): void {
        const entry = this.getEntry(key);

        if (!entry) {
            return;
        }

        if (entry.state === WatcherState.INITIALIZING) {
            // Mark as stopping so the initialization will clean up
            this.setStopping(key);
            this.logger.debug(`Marked watcher as stopping during initialization: ${key}`);
        } else if (entry.state === WatcherState.ACTIVE) {
            // Close the active watcher
            entry.watcher.close();
            this.removeEntry(key);
            this.logger.debug(`Stopped active watcher: ${key}`);
        }
    }

    /**
     * Update the position for an active watcher
     */
    updatePosition(key: string, newPosition: number): void {
        const entry = this.getEntry(key);
        if (this.isActive(entry)) {
            entry.position = newPosition;
        }
    }

    /**
     * Start processing a change event (set inFlight to true)
     * Returns true if processing can proceed, false if already in flight
     */
    startProcessing(key: string): boolean {
        const entry = this.getEntry(key);
        if (this.isActive(entry)) {
            if (entry.inFlight) {
                return false; // Already processing
            }
            entry.inFlight = true;
            return true;
        }
        return false;
    }

    /**
     * Finish processing a change event (set inFlight to false)
     */
    finishProcessing(key: string): void {
        const entry = this.getEntry(key);
        if (this.isActive(entry)) {
            entry.inFlight = false;
        }
    }

    /**
     * Get the position for an active watcher
     */
    getPosition(key: string): number | undefined {
        const entry = this.getEntry(key);
        if (this.isActive(entry)) {
            return entry.position;
        }
        return undefined;
    }

    /**
     * Clean up all watchers (useful for module cleanup)
     */
    stopAllWatchers(): void {
        for (const entry of this.watchers.values()) {
            if (this.isActive(entry)) {
                entry.watcher.close();
            }
        }
        this.watchers.clear();
    }
}
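Taken together, the entry states form a small lifecycle: INITIALIZING is claimed synchronously before any async setup, ACTIVE carries the FSWatcher plus a read position and an in-flight guard, and STOPPING is a tombstone telling a still-running initialization to clean up after itself. A sketch of the intended call sequence, using only the methods defined above; the import path is an assumption and the body comments mark what real callers would do:

    import * as chokidar from 'chokidar';
    import { LogWatcherManager } from './log-watcher-manager.service.js'; // assumed path

    async function watchOnce(manager: LogWatcherManager, key: string): Promise<void> {
        manager.setInitializing(key); // claim the key before any await

        const watcher = chokidar.watch(key); // async setup happens around here
        if (!manager.handlePostInitialization(key, watcher, 0)) {
            return; // stopWatcher() ran mid-init; the entry is already cleaned up
        }

        // Entry is ACTIVE; the in-flight guard serializes change handling.
        if (manager.startProcessing(key)) {
            try {
                // ...read new bytes from the tracked position and publish them...
                manager.updatePosition(key, 1024);
            } finally {
                manager.finishProcessing(key);
            }
        }

        manager.stopWatcher(key); // closes the FSWatcher when ACTIVE
    }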
@@ -1,10 +1,13 @@
import { Module } from '@nestjs/common';

import { LogWatcherManager } from '@app/unraid-api/graph/resolvers/logs/log-watcher-manager.service.js';
import { LogsResolver } from '@app/unraid-api/graph/resolvers/logs/logs.resolver.js';
import { LogsService } from '@app/unraid-api/graph/resolvers/logs/logs.service.js';
import { ServicesModule } from '@app/unraid-api/graph/services/services.module.js';

@Module({
    providers: [LogsResolver, LogsService],
    exports: [LogsService],
    imports: [ServicesModule],
    providers: [LogsResolver, LogsService, LogWatcherManager],
    exports: [LogsService, LogWatcherManager],
})
export class LogsModule {}
@@ -1,9 +1,10 @@
import { Test, TestingModule } from '@nestjs/testing';

import { beforeEach, describe, expect, it } from 'vitest';
import { beforeEach, describe, expect, it, vi } from 'vitest';

import { LogsResolver } from '@app/unraid-api/graph/resolvers/logs/logs.resolver.js';
import { LogsService } from '@app/unraid-api/graph/resolvers/logs/logs.service.js';
import { SubscriptionHelperService } from '@app/unraid-api/graph/services/subscription-helper.service.js';

describe('LogsResolver', () => {
    let resolver: LogsResolver;
@@ -18,6 +19,13 @@ describe('LogsResolver', () => {
                    // Add mock implementations for service methods used by resolver
                },
            },
            {
                provide: SubscriptionHelperService,
                useValue: {
                    // Add mock implementations for subscription helper methods
                    createTrackedSubscription: vi.fn(),
                },
            },
        ],
    }).compile();
    resolver = module.get<LogsResolver>(LogsResolver);
@@ -3,13 +3,16 @@ import { Args, Int, Query, Resolver, Subscription } from '@nestjs/graphql';
import { AuthAction, Resource } from '@unraid/shared/graphql.model.js';
import { UsePermissions } from '@unraid/shared/use-permissions.directive.js';

import { createSubscription, PUBSUB_CHANNEL } from '@app/core/pubsub.js';
import { LogFile, LogFileContent } from '@app/unraid-api/graph/resolvers/logs/logs.model.js';
import { LogsService } from '@app/unraid-api/graph/resolvers/logs/logs.service.js';
import { SubscriptionHelperService } from '@app/unraid-api/graph/services/subscription-helper.service.js';

@Resolver(() => LogFile)
export class LogsResolver {
    constructor(private readonly logsService: LogsService) {}
    constructor(
        private readonly logsService: LogsService,
        private readonly subscriptionHelper: SubscriptionHelperService
    ) {}

    @Query(() => [LogFile])
    @UsePermissions({
@@ -38,27 +41,12 @@ export class LogsResolver {
        action: AuthAction.READ_ANY,
        resource: Resource.LOGS,
    })
    async logFileSubscription(@Args('path') path: string) {
        // Start watching the file
        this.logsService.getLogFileSubscriptionChannel(path);
    logFileSubscription(@Args('path') path: string) {
        // Register the topic and get the key
        const topicKey = this.logsService.registerLogFileSubscription(path);

        // Create the async iterator
        const asyncIterator = createSubscription(PUBSUB_CHANNEL.LOG_FILE);

        // Store the original return method to wrap it
        const originalReturn = asyncIterator.return;

        // Override the return method to clean up resources
        asyncIterator.return = async () => {
            // Stop watching the file when subscription ends
            this.logsService.stopWatchingLogFile(path);

            // Call the original return method
            return originalReturn
                ? originalReturn.call(asyncIterator)
                : Promise.resolve({ value: undefined, done: true });
        };

        return asyncIterator;
        // Use the helper service to create a tracked subscription
        // This automatically handles subscribe/unsubscribe with reference counting
        return this.subscriptionHelper.createTrackedSubscription(topicKey);
    }
}
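The resolver change replaces hand-wrapping of the iterator's return method with a single helper call. The helper itself is not shown in this diff, so the sketch below is an assumption of what createTrackedSubscription could look like internally: the same wrap-the-return trick as the removed code, combined with reference counting so the last consumer to disconnect triggers cleanup.

    // Hypothetical shape; the real SubscriptionHelperService is not in this diff.
    interface RefCounter {
        subscribe(topic: string): void; // first subscriber fires the topic's onStart
        unsubscribe(topic: string): void; // last subscriber fires the topic's onStop
    }

    function createTrackedSubscription<T>(
        tracker: RefCounter,
        topic: string,
        iterator: AsyncIterableIterator<T>
    ): AsyncIterableIterator<T> {
        tracker.subscribe(topic);
        const originalReturn = iterator.return?.bind(iterator);
        iterator.return = async () => {
            tracker.unsubscribe(topic); // replaces the explicit stopWatchingLogFile(path)
            return originalReturn ? originalReturn() : { value: undefined, done: true };
        };
        return iterator;
    }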
api/src/unraid-api/graph/resolvers/logs/logs.service.spec.ts (new file, 201 lines)
@@ -0,0 +1,201 @@
import { Test, TestingModule } from '@nestjs/testing';
import * as fs from 'node:fs/promises';

import * as chokidar from 'chokidar';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { LogWatcherManager } from '@app/unraid-api/graph/resolvers/logs/log-watcher-manager.service.js';
import { LogsService } from '@app/unraid-api/graph/resolvers/logs/logs.service.js';
import { SubscriptionTrackerService } from '@app/unraid-api/graph/services/subscription-tracker.service.js';

vi.mock('node:fs/promises');
vi.mock('chokidar');
vi.mock('@app/store/index.js', () => ({
    getters: {
        paths: () => ({
            'unraid-log-base': '/var/log',
        }),
    },
}));
vi.mock('@app/core/pubsub.js', () => ({
    pubsub: {
        publish: vi.fn(),
    },
    PUBSUB_CHANNEL: {},
}));

describe('LogsService', () => {
    let service: LogsService;
    let mockWatcher: any;
    let subscriptionTracker: any;

    beforeEach(async () => {
        // Create a mock watcher
        mockWatcher = {
            on: vi.fn(),
            close: vi.fn(),
        };

        // Mock chokidar.watch to return our mock watcher
        vi.mocked(chokidar.watch).mockReturnValue(mockWatcher as any);

        // Mock fs.stat to return a file size
        vi.mocked(fs.stat).mockResolvedValue({ size: 1000 } as any);

        subscriptionTracker = {
            getSubscriberCount: vi.fn().mockReturnValue(0),
            registerTopic: vi.fn(),
        };

        const module: TestingModule = await Test.createTestingModule({
            providers: [
                LogsService,
                LogWatcherManager,
                {
                    provide: SubscriptionTrackerService,
                    useValue: subscriptionTracker,
                },
            ],
        }).compile();

        service = module.get<LogsService>(LogsService);
    });

    afterEach(() => {
        vi.clearAllMocks();
    });

    it('should be defined', () => {
        expect(service).toBeDefined();
    });

    it('should handle race condition when stopping watcher during initialization', async () => {
        // Setup: Register the subscription which will trigger registerTopic
        service.registerLogFileSubscription('test.log');

        // Get the onStart callback that was registered
        const registerTopicCall = subscriptionTracker.registerTopic.mock.calls[0];
        const onStartCallback = registerTopicCall[1];
        const onStopCallback = registerTopicCall[2];

        // Create a promise to control when stat resolves
        let statResolve: any;
        const statPromise = new Promise((resolve) => {
            statResolve = resolve;
        });
        vi.mocked(fs.stat).mockReturnValue(statPromise as any);

        // Start the watcher (this will call startWatchingLogFile internally)
        onStartCallback();

        // At this point, the watcher should be marked as 'initializing'
        // Now call stop before the stat promise resolves
        onStopCallback();

        // Now resolve the stat promise to complete initialization
        statResolve({ size: 1000 });

        // Wait for any async operations to complete
        await new Promise((resolve) => setImmediate(resolve));

        // The watcher should have been closed due to the race condition check
        expect(mockWatcher.close).toHaveBeenCalled();
    });

    it('should not leak watcher if stopped multiple times during initialization', async () => {
        // Setup: Register the subscription
        service.registerLogFileSubscription('test.log');

        const registerTopicCall = subscriptionTracker.registerTopic.mock.calls[0];
        const onStartCallback = registerTopicCall[1];
        const onStopCallback = registerTopicCall[2];

        // Create controlled stat promise
        let statResolve: any;
        const statPromise = new Promise((resolve) => {
            statResolve = resolve;
        });
        vi.mocked(fs.stat).mockReturnValue(statPromise as any);

        // Start the watcher
        onStartCallback();

        // Call stop multiple times during initialization
        onStopCallback();
        onStopCallback();
        onStopCallback();

        // Complete initialization
        statResolve({ size: 1000 });
        await new Promise((resolve) => setImmediate(resolve));

        // Should only close once
        expect(mockWatcher.close).toHaveBeenCalledTimes(1);
    });

    it('should properly handle normal start and stop without race condition', async () => {
        // Setup: Register the subscription
        service.registerLogFileSubscription('test.log');

        const registerTopicCall = subscriptionTracker.registerTopic.mock.calls[0];
        const onStartCallback = registerTopicCall[1];
        const onStopCallback = registerTopicCall[2];

        // Make stat resolve immediately
        vi.mocked(fs.stat).mockResolvedValue({ size: 1000 } as any);

        // Start the watcher and let it complete initialization
        onStartCallback();
        await new Promise((resolve) => setImmediate(resolve));

        // Watcher should be created but not closed
        expect(chokidar.watch).toHaveBeenCalled();
        expect(mockWatcher.close).not.toHaveBeenCalled();

        // Now stop it normally
        onStopCallback();

        // Watcher should be closed
        expect(mockWatcher.close).toHaveBeenCalledTimes(1);
    });

    it('should handle error during initialization without leaking watchers', async () => {
        // Setup: Register the subscription
        service.registerLogFileSubscription('test.log');

        const registerTopicCall = subscriptionTracker.registerTopic.mock.calls[0];
        const onStartCallback = registerTopicCall[1];

        // Make stat reject with an error
        vi.mocked(fs.stat).mockRejectedValue(new Error('File not found'));

        // Start the watcher (should fail during initialization)
        onStartCallback();
        await new Promise((resolve) => setImmediate(resolve));

        // Watcher should never be created due to stat error
        expect(chokidar.watch).not.toHaveBeenCalled();
        expect(mockWatcher.close).not.toHaveBeenCalled();
    });

    it('should not create duplicate watchers when started multiple times', async () => {
        // Setup: Register the subscription
        service.registerLogFileSubscription('test.log');

        const registerTopicCall = subscriptionTracker.registerTopic.mock.calls[0];
        const onStartCallback = registerTopicCall[1];

        // Make stat resolve immediately
        vi.mocked(fs.stat).mockResolvedValue({ size: 1000 } as any);

        // Start the watcher multiple times
        onStartCallback();
        onStartCallback();
        onStartCallback();

        await new Promise((resolve) => setImmediate(resolve));

        // Should only create one watcher
        expect(chokidar.watch).toHaveBeenCalledTimes(1);
    });
});
@@ -1,13 +1,15 @@
import { Injectable, Logger } from '@nestjs/common';
import { createReadStream } from 'node:fs';
import { readdir, readFile, stat } from 'node:fs/promises';
import { readdir, stat } from 'node:fs/promises';
import { basename, join } from 'node:path';
import { createInterface } from 'node:readline';

import * as chokidar from 'chokidar';

import { pubsub, PUBSUB_CHANNEL } from '@app/core/pubsub.js';
import { pubsub } from '@app/core/pubsub.js';
import { getters } from '@app/store/index.js';
import { LogWatcherManager } from '@app/unraid-api/graph/resolvers/logs/log-watcher-manager.service.js';
import { SubscriptionTrackerService } from '@app/unraid-api/graph/services/subscription-tracker.service.js';

interface LogFile {
    name: string;
@@ -26,12 +28,13 @@ interface LogFileContent {
@Injectable()
export class LogsService {
    private readonly logger = new Logger(LogsService.name);
    private readonly logWatchers = new Map<
        string,
        { watcher: chokidar.FSWatcher; position: number; subscriptionCount: number }
    >();
    private readonly DEFAULT_LINES = 100;

    constructor(
        private readonly subscriptionTracker: SubscriptionTrackerService,
        private readonly watcherManager: LogWatcherManager
    ) {}

    /**
     * Get the base path for log files
     */
@@ -111,135 +114,208 @@ export class LogsService {
    }

    /**
     * Get the subscription channel for a log file
     * Register and get the topic key for a log file subscription
     * @param path Path to the log file
     * @returns The subscription topic key
     */
    getLogFileSubscriptionChannel(path: string): PUBSUB_CHANNEL {
    registerLogFileSubscription(path: string): string {
        const normalizedPath = join(this.logBasePath, basename(path));
        const topicKey = this.getTopicKey(normalizedPath);

        // Start watching the file if not already watching
        if (!this.logWatchers.has(normalizedPath)) {
            this.startWatchingLogFile(normalizedPath);
        } else {
            // Increment subscription count for existing watcher
            const watcher = this.logWatchers.get(normalizedPath);
            if (watcher) {
                watcher.subscriptionCount++;
                this.logger.debug(
                    `Incremented subscription count for ${normalizedPath} to ${watcher.subscriptionCount}`
                );
            }
        // Register the topic if not already registered
        if (!this.subscriptionTracker.getSubscriberCount(topicKey)) {
            this.logger.debug(`Registering log file subscription topic: ${topicKey}`);

            this.subscriptionTracker.registerTopic(
                topicKey,
                // onStart handler
                () => {
                    this.logger.debug(`Starting log file watcher for topic: ${topicKey}`);
                    this.startWatchingLogFile(normalizedPath);
                },
                // onStop handler
                () => {
                    this.logger.debug(`Stopping log file watcher for topic: ${topicKey}`);
                    this.stopWatchingLogFile(normalizedPath);
                }
            );
        }

        return PUBSUB_CHANNEL.LOG_FILE;
        return topicKey;
    }

    /**
     * Start watching a log file for changes using chokidar
     * @param path Path to the log file
     */
    private async startWatchingLogFile(path: string): Promise<void> {
        try {
            // Get initial file size
            const stats = await stat(path);
            let position = stats.size;
    private startWatchingLogFile(path: string): void {
        const watcherKey = path;

            // Create a watcher for the file using chokidar
            const watcher = chokidar.watch(path, {
                persistent: true,
                awaitWriteFinish: {
                    stabilityThreshold: 300,
                    pollInterval: 100,
                },
            });
        // Check if already watching or initializing
        if (this.watcherManager.isWatchingOrInitializing(watcherKey)) {
            this.logger.debug(`Already watching or initializing log file: ${watcherKey}`);
            return;
        }

            watcher.on('change', async () => {
                try {
                    const newStats = await stat(path);
        // Mark as initializing immediately to prevent race conditions
        this.watcherManager.setInitializing(watcherKey);

                    // If the file has grown
                    if (newStats.size > position) {
                        // Read only the new content
                        const stream = createReadStream(path, {
                            start: position,
                            end: newStats.size - 1,
                        });
        // Get initial file size and set up watcher
        stat(path)
            .then((stats) => {
                const position = stats.size;

                        let newContent = '';
                        stream.on('data', (chunk) => {
                            newContent += chunk.toString();
                        });
                // Create a watcher for the file using chokidar
                const watcher = chokidar.watch(path, {
                    persistent: true,
                    awaitWriteFinish: {
                        stabilityThreshold: 300,
                        pollInterval: 100,
                    },
                });

                        stream.on('end', () => {
                            if (newContent) {
                                pubsub.publish(PUBSUB_CHANNEL.LOG_FILE, {
                watcher.on('change', async () => {
                    // Check if we're already processing a change event for this file
                    if (!this.watcherManager.startProcessing(watcherKey)) {
                        // Already processing, ignore this event
                        return;
                    }

                    try {
                        const newStats = await stat(path);

                        // Get the current position
                        const currentPosition = this.watcherManager.getPosition(watcherKey);
                        if (currentPosition === undefined) {
                            // Watcher was stopped or not active, ignore the event
                            return;
                        }

                        // If the file has grown
                        if (newStats.size > currentPosition) {
                            // Read only the new content
                            const stream = createReadStream(path, {
                                start: currentPosition,
                                end: newStats.size - 1,
                            });

                            let newContent = '';
                            stream.on('data', (chunk) => {
                                newContent += chunk.toString();
                            });

                            stream.on('end', () => {
                                try {
                                    if (newContent) {
                                        // Use topic-specific channel
                                        const topicKey = this.getTopicKey(path);
                                        pubsub.publish(topicKey, {
                                            logFile: {
                                                path,
                                                content: newContent,
                                                totalLines: 0, // We don't need to count lines for updates
                                            },
                                        });
                                    }

                                    // Update position for next read (while still holding the guard)
                                    this.watcherManager.updatePosition(watcherKey, newStats.size);
                                } finally {
                                    // Clear the in-flight flag
                                    this.watcherManager.finishProcessing(watcherKey);
                                }
                            });

                            stream.on('error', (error) => {
                                this.logger.error(`Error reading stream for ${path}: ${error}`);
                                // Clear the in-flight flag on error
                                this.watcherManager.finishProcessing(watcherKey);
                            });
                        } else if (newStats.size < currentPosition) {
                            // File was truncated, reset position and read from beginning
                            this.logger.debug(`File ${path} was truncated, resetting position`);

                            try {
                                // Read the entire file content
                                const content = await this.getLogFileContent(
                                    path,
                                    this.DEFAULT_LINES,
                                    undefined
                                );

                                // Use topic-specific channel
                                const topicKey = this.getTopicKey(path);
                                pubsub.publish(topicKey, {
                                    logFile: {
                                        path,
                                        content: newContent,
                                        totalLines: 0, // We don't need to count lines for updates
                                        ...content,
                                    },
                                });

                                // Update position (while still holding the guard)
                                this.watcherManager.updatePosition(watcherKey, newStats.size);
                            } finally {
                                // Clear the in-flight flag
                                this.watcherManager.finishProcessing(watcherKey);
                            }

                        // Update position for next read
                        position = newStats.size;
                    });
                    } else if (newStats.size < position) {
                        // File was truncated, reset position and read from beginning
                        position = 0;
                        this.logger.debug(`File ${path} was truncated, resetting position`);

                        // Read the entire file content
                        const content = await this.getLogFileContent(path);

                        pubsub.publish(PUBSUB_CHANNEL.LOG_FILE, {
                            logFile: content,
                        });

                        position = newStats.size;
                        } else {
                            // File size unchanged, clear the in-flight flag
                            this.watcherManager.finishProcessing(watcherKey);
                        }
                    } catch (error: unknown) {
                        this.logger.error(`Error processing file change for ${path}: ${error}`);
                        // Clear the in-flight flag on error
                        this.watcherManager.finishProcessing(watcherKey);
                    }
                } catch (error: unknown) {
                    this.logger.error(`Error processing file change for ${path}: ${error}`);
                });

                watcher.on('error', (error) => {
                    this.logger.error(`Chokidar watcher error for ${path}: ${error}`);
                });

                // Check if we were stopped during initialization and handle cleanup
                if (!this.watcherManager.handlePostInitialization(watcherKey, watcher, position)) {
                    return;
                }

                // Publish initial snapshot
                this.getLogFileContent(path, this.DEFAULT_LINES, undefined)
                    .then((content) => {
                        const topicKey = this.getTopicKey(path);
                        pubsub.publish(topicKey, {
                            logFile: {
                                ...content,
                            },
                        });
                    })
                    .catch((error) => {
                        this.logger.error(`Error publishing initial log content for ${path}: ${error}`);
                    });

                this.logger.debug(`Started watching log file with chokidar: ${path}`);
            })
            .catch((error) => {
                this.logger.error(`Error setting up file watcher for ${path}: ${error}`);
                // Clean up the initializing entry on error
                this.watcherManager.removeEntry(watcherKey);
            });
    }

            watcher.on('error', (error) => {
                this.logger.error(`Chokidar watcher error for ${path}: ${error}`);
            });

            // Store the watcher and current position with initial subscription count of 1
            this.logWatchers.set(path, { watcher, position, subscriptionCount: 1 });

            this.logger.debug(
                `Started watching log file with chokidar: ${path} (subscription count: 1)`
            );
        } catch (error: unknown) {
            this.logger.error(`Error setting up chokidar file watcher for ${path}: ${error}`);
        }
    /**
     * Get the topic key for a log file subscription
     * @param path Path to the log file (should already be normalized)
     * @returns The topic key
     */
    private getTopicKey(path: string): string {
        // Assume path is already normalized (full path)
        return `LOG_FILE:${path}`;
    }

    /**
     * Stop watching a log file
     * @param path Path to the log file
     */
    public stopWatchingLogFile(path: string): void {
        const normalizedPath = join(this.logBasePath, basename(path));
        const watcher = this.logWatchers.get(normalizedPath);

        if (watcher) {
            // Decrement subscription count
            watcher.subscriptionCount--;
            this.logger.debug(
                `Decremented subscription count for ${normalizedPath} to ${watcher.subscriptionCount}`
            );

            // Only close the watcher when subscription count reaches 0
            if (watcher.subscriptionCount <= 0) {
                watcher.watcher.close();
                this.logWatchers.delete(normalizedPath);
                this.logger.debug(`Stopped watching log file: ${normalizedPath} (no more subscribers)`);
            }
        }
    private stopWatchingLogFile(path: string): void {
        this.watcherManager.stopWatcher(path);
    }

    /**
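The refactor moves manual subscription counting out of LogsService and behind the tracker's registerTopic(key, onStart, onStop) contract: the first subscriber to a topic starts the watcher and the last one stops it. The tracker itself is not part of this diff; the sketch below is inferred from the call sites above and from how the spec mocks it, so treat the class and field names as assumptions.

    // Inferred contract for the reference-counting tracker (not in this diff).
    type TopicHandlers = { onStart: () => void; onStop: () => void };

    class SubscriptionTrackerSketch {
        private topics = new Map<string, TopicHandlers & { count: number }>();

        registerTopic(key: string, onStart: () => void, onStop: () => void): void {
            if (!this.topics.has(key)) this.topics.set(key, { onStart, onStop, count: 0 });
        }

        subscribe(key: string): void {
            const t = this.topics.get(key);
            if (t && t.count++ === 0) t.onStart(); // first subscriber starts the watcher
        }

        unsubscribe(key: string): void {
            const t = this.topics.get(key);
            if (t && --t.count === 0) t.onStop(); // last subscriber stops it
        }

        getSubscriberCount(key: string): number {
            return this.topics.get(key)?.count ?? 0;
        }
    }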
@@ -9,7 +9,7 @@ import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service
import { MemoryService } from '@app/unraid-api/graph/resolvers/info/memory/memory.service.js';
import { MetricsResolver } from '@app/unraid-api/graph/resolvers/metrics/metrics.resolver.js';
import { SubscriptionHelperService } from '@app/unraid-api/graph/services/subscription-helper.service.js';
import { SubscriptionPollingService } from '@app/unraid-api/graph/services/subscription-polling.service.js';
import { SubscriptionManagerService } from '@app/unraid-api/graph/services/subscription-manager.service.js';
import { SubscriptionTrackerService } from '@app/unraid-api/graph/services/subscription-tracker.service.js';

describe('MetricsResolver Integration Tests', () => {
@@ -25,7 +25,7 @@ describe('MetricsResolver Integration Tests', () => {
                MemoryService,
                SubscriptionTrackerService,
                SubscriptionHelperService,
                SubscriptionPollingService,
                SubscriptionManagerService,
            ],
        }).compile();

@@ -36,8 +36,8 @@ describe('MetricsResolver Integration Tests', () => {

    afterEach(async () => {
        // Clean up polling service
        const pollingService = module.get<SubscriptionPollingService>(SubscriptionPollingService);
        pollingService.stopAll();
        const subscriptionManager = module.get<SubscriptionManagerService>(SubscriptionManagerService);
        subscriptionManager.stopAll();
        await module.close();
    });

@@ -202,10 +202,13 @@ describe('MetricsResolver Integration Tests', () => {
    it('should handle errors in CPU polling gracefully', async () => {
        const service = module.get<CpuService>(CpuService);
        const trackerService = module.get<SubscriptionTrackerService>(SubscriptionTrackerService);
        const pollingService = module.get<SubscriptionPollingService>(SubscriptionPollingService);
        const subscriptionManager =
            module.get<SubscriptionManagerService>(SubscriptionManagerService);

        // Mock logger to capture error logs
        const loggerSpy = vi.spyOn(pollingService['logger'], 'error').mockImplementation(() => {});
        const loggerSpy = vi
            .spyOn(subscriptionManager['logger'], 'error')
            .mockImplementation(() => {});
        vi.spyOn(service, 'generateCpuLoad').mockRejectedValueOnce(new Error('CPU error'));

        // Trigger polling
@@ -215,7 +218,7 @@ describe('MetricsResolver Integration Tests', () => {
        await new Promise((resolve) => setTimeout(resolve, 1100));

        expect(loggerSpy).toHaveBeenCalledWith(
            expect.stringContaining('Error in polling task'),
            expect.stringContaining('Error in subscription callback'),
            expect.any(Error)
        );

@@ -226,10 +229,13 @@ describe('MetricsResolver Integration Tests', () => {
    it('should handle errors in memory polling gracefully', async () => {
        const service = module.get<MemoryService>(MemoryService);
        const trackerService = module.get<SubscriptionTrackerService>(SubscriptionTrackerService);
        const pollingService = module.get<SubscriptionPollingService>(SubscriptionPollingService);
        const subscriptionManager =
            module.get<SubscriptionManagerService>(SubscriptionManagerService);

        // Mock logger to capture error logs
        const loggerSpy = vi.spyOn(pollingService['logger'], 'error').mockImplementation(() => {});
        const loggerSpy = vi
            .spyOn(subscriptionManager['logger'], 'error')
            .mockImplementation(() => {});
        vi.spyOn(service, 'generateMemoryLoad').mockRejectedValueOnce(new Error('Memory error'));

        // Trigger polling
@@ -239,7 +245,7 @@ describe('MetricsResolver Integration Tests', () => {
        await new Promise((resolve) => setTimeout(resolve, 2100));

        expect(loggerSpy).toHaveBeenCalledWith(
            expect.stringContaining('Error in polling task'),
            expect.stringContaining('Error in subscription callback'),
            expect.any(Error)
        );

@@ -251,22 +257,30 @@ describe('MetricsResolver Integration Tests', () => {
    describe('Polling cleanup on module destroy', () => {
        it('should clean up timers when module is destroyed', async () => {
            const trackerService = module.get<SubscriptionTrackerService>(SubscriptionTrackerService);
            const pollingService = module.get<SubscriptionPollingService>(SubscriptionPollingService);
            const subscriptionManager =
                module.get<SubscriptionManagerService>(SubscriptionManagerService);

            // Start polling
            trackerService.subscribe(PUBSUB_CHANNEL.CPU_UTILIZATION);
            trackerService.subscribe(PUBSUB_CHANNEL.MEMORY_UTILIZATION);

            // Verify polling is active
            expect(pollingService.isPolling(PUBSUB_CHANNEL.CPU_UTILIZATION)).toBe(true);
            expect(pollingService.isPolling(PUBSUB_CHANNEL.MEMORY_UTILIZATION)).toBe(true);
            // Wait a bit for subscriptions to be fully set up
            await new Promise((resolve) => setTimeout(resolve, 100));

            // Verify subscriptions are active
            expect(subscriptionManager.isSubscriptionActive(PUBSUB_CHANNEL.CPU_UTILIZATION)).toBe(true);
            expect(subscriptionManager.isSubscriptionActive(PUBSUB_CHANNEL.MEMORY_UTILIZATION)).toBe(
                true
            );

            // Clean up the module
            await module.close();

            // Timers should be cleaned up
            expect(pollingService.isPolling(PUBSUB_CHANNEL.CPU_UTILIZATION)).toBe(false);
            expect(pollingService.isPolling(PUBSUB_CHANNEL.MEMORY_UTILIZATION)).toBe(false);
            // Subscriptions should be cleaned up
            expect(subscriptionManager.isSubscriptionActive(PUBSUB_CHANNEL.CPU_UTILIZATION)).toBe(false);
            expect(subscriptionManager.isSubscriptionActive(PUBSUB_CHANNEL.MEMORY_UTILIZATION)).toBe(
                false
            );
        });
    });
});
@@ -32,6 +32,8 @@ describe('MetricsResolver', () => {
                loadNice: 0,
                loadIdle: 70.0,
                loadIrq: 0,
                loadGuest: 0,
                loadSteal: 0,
            },
            {
                load: 21.0,
@@ -40,6 +42,8 @@ describe('MetricsResolver', () => {
                loadNice: 0,
                loadIdle: 79.0,
                loadIrq: 0,
                loadGuest: 0,
                loadSteal: 0,
            },
        ],
    }),
@@ -1,4 +1,4 @@
import { Injectable, Logger, OnModuleDestroy, OnModuleInit } from '@nestjs/common';
import { Injectable, Logger, OnApplicationBootstrap, OnModuleDestroy } from '@nestjs/common';
import crypto from 'crypto';
import { ChildProcess } from 'node:child_process';
import { mkdir, rm, writeFile } from 'node:fs/promises';
@@ -7,6 +7,7 @@ import { dirname, join } from 'node:path';
import { execa } from 'execa';
import got, { HTTPError } from 'got';
import pRetry from 'p-retry';
import semver from 'semver';

import { sanitizeParams } from '@app/core/log.js';
import { fileExists } from '@app/core/utils/files/file-exists.js';
@@ -25,7 +26,7 @@ import {
import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';

@Injectable()
export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
export class RCloneApiService implements OnApplicationBootstrap, OnModuleDestroy {
    private isInitialized: boolean = false;
    private readonly logger = new Logger(RCloneApiService.name);
    private rcloneSocketPath: string = '';
@@ -44,7 +45,7 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
        return this.isInitialized;
    }

    async onModuleInit(): Promise<void> {
    async onApplicationBootstrap(): Promise<void> {
        // RClone startup disabled - early return
        if (ENVIRONMENT === 'production') {
            this.logger.debug('RClone startup is disabled');
@@ -239,12 +240,41 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
    }

    /**
     * Checks if the RClone binary is available on the system
     * Checks if the RClone binary is available on the system and meets minimum version requirements
     */
    private async checkRcloneBinaryExists(): Promise<boolean> {
        try {
            await execa('rclone', ['version']);
            this.logger.debug('RClone binary is available on the system.');
            const result = await execa('rclone', ['version']);
            const versionOutput = result.stdout.trim();

            // Extract raw version string (format: "rclone vX.XX.X" or "rclone vX.XX.X-beta.X")
            const versionMatch = versionOutput.match(/rclone v([\d.\-\w]+)/);
            if (!versionMatch) {
                this.logger.error('Unable to parse RClone version from output');
                return false;
            }

            const rawVersion = versionMatch[1];

            // Use semver.coerce to get base semver from prerelease versions
            const coercedVersion = semver.coerce(rawVersion);
            if (!coercedVersion) {
                this.logger.error(`Failed to parse RClone version: raw="${rawVersion}"`);
                return false;
            }

            const minimumVersion = '1.70.0';

            if (!semver.gte(coercedVersion, minimumVersion)) {
                this.logger.error(
                    `RClone version ${rawVersion} (coerced: ${coercedVersion}) is too old. Minimum required version is ${minimumVersion}`
                );
                return false;
            }

            this.logger.debug(
                `RClone binary is available on the system (version ${rawVersion}, coerced: ${coercedVersion}).`
            );
            return true;
        } catch (error: unknown) {
            if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
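The version gate relies on a documented behavior of the semver package: coerce() strips prerelease and build suffixes down to a bare X.Y.Z, so a beta build of a new-enough release still passes the gte() check. A small standalone sketch mirroring the gate above; the sample version strings are illustrative:

    import semver from 'semver';

    // coerce('1.70.1-beta.2') yields 1.70.1, so prerelease builds of 1.70.x
    // satisfy a ">= 1.70.0" minimum even though semver ordering alone would
    // place 1.70.0-beta.2 before 1.70.0.
    for (const raw of ['1.69.3', '1.70.0', '1.70.1-beta.2']) {
        const coerced = semver.coerce(raw); // SemVer instance or null
        const ok = coerced !== null && semver.gte(coerced, '1.70.0');
        console.log(`${raw} -> ${ok ? 'ok' : 'too old'}`);
    }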
@@ -1,6 +1,7 @@
import { Module } from '@nestjs/common';

import { AuthModule } from '@app/unraid-api/auth/auth.module.js';
import { ApiConfigModule } from '@app/unraid-api/config/api-config.module.js';
import { ApiKeyModule } from '@app/unraid-api/graph/resolvers/api-key/api-key.module.js';
import { ApiKeyResolver } from '@app/unraid-api/graph/resolvers/api-key/api-key.resolver.js';
import { ArrayModule } from '@app/unraid-api/graph/resolvers/array/array.module.js';
@@ -11,8 +12,7 @@ import { DockerModule } from '@app/unraid-api/graph/resolvers/docker/docker.modu
import { FlashBackupModule } from '@app/unraid-api/graph/resolvers/flash-backup/flash-backup.module.js';
import { FlashResolver } from '@app/unraid-api/graph/resolvers/flash/flash.resolver.js';
import { InfoModule } from '@app/unraid-api/graph/resolvers/info/info.module.js';
import { LogsResolver } from '@app/unraid-api/graph/resolvers/logs/logs.resolver.js';
import { LogsService } from '@app/unraid-api/graph/resolvers/logs/logs.service.js';
import { LogsModule } from '@app/unraid-api/graph/resolvers/logs/logs.module.js';
import { MetricsModule } from '@app/unraid-api/graph/resolvers/metrics/metrics.module.js';
import { RootMutationsResolver } from '@app/unraid-api/graph/resolvers/mutation/mutation.resolver.js';
import { NotificationsResolver } from '@app/unraid-api/graph/resolvers/notifications/notifications.resolver.js';
@@ -39,12 +39,14 @@ import { MeResolver } from '@app/unraid-api/graph/user/user.resolver.js';
        ServicesModule,
        ArrayModule,
        ApiKeyModule,
        ApiConfigModule,
        AuthModule,
        CustomizationModule,
        DockerModule,
        DisksModule,
        FlashBackupModule,
        InfoModule,
        LogsModule,
        RCloneModule,
        SettingsModule,
        SsoModule,
@@ -54,8 +56,6 @@ import { MeResolver } from '@app/unraid-api/graph/user/user.resolver.js';
    providers: [
        ConfigResolver,
        FlashResolver,
        LogsResolver,
        LogsService,
        MeResolver,
        NotificationsResolver,
        NotificationsService,
@@ -38,7 +38,9 @@ export class Server extends Node {
    @Field()
    name!: string;

    @Field(() => ServerStatus)
    @Field(() => ServerStatus, {
        description: 'Whether this server is online or offline',
    })
    status!: ServerStatus;

    @Field()
@@ -24,7 +24,7 @@ export class ServerResolver {
        resource: Resource.SERVERS,
    })
    public async server(): Promise<ServerModel | null> {
        return this.getLocalServer()[0] || null;
        return this.getLocalServer() || null;
    }

    @Query(() => [ServerModel])
@@ -33,7 +33,7 @@ export class ServerResolver {
        resource: Resource.SERVERS,
    })
    public async servers(): Promise<ServerModel[]> {
        return this.getLocalServer();
        return [this.getLocalServer()];
    }

    @Subscription(() => ServerModel)
@@ -45,7 +45,7 @@ export class ServerResolver {
        return createSubscription(PUBSUB_CHANNEL.SERVERS);
    }

    private getLocalServer(): ServerModel[] {
    private getLocalServer(): ServerModel {
        const emhttp = getters.emhttp();
        const connectConfig = this.configService.get('connect');

@@ -64,22 +64,17 @@ export class ServerResolver {
            avatar: '',
        };

        return [
            {
                id: 'local',
                owner,
                guid: guid || '',
                apikey: connectConfig?.config?.apikey ?? '',
                name: name ?? 'Local Server',
                status:
                    connectConfig?.mothership?.status === MinigraphStatus.CONNECTED
                        ? ServerStatus.ONLINE
                        : ServerStatus.OFFLINE,
                wanip,
                lanip,
                localurl,
                remoteurl,
            },
        ];
        return {
            id: 'local',
            owner,
            guid: guid || '',
            apikey: connectConfig?.config?.apikey ?? '',
            name: name ?? 'Local Server',
            status: ServerStatus.ONLINE,
            wanip,
            lanip,
            localurl,
            remoteurl,
        };
    }
}
@@ -16,8 +16,8 @@ import {
} from '@app/unraid-api/graph/resolvers/settings/settings.model.js';
import { ApiSettings } from '@app/unraid-api/graph/resolvers/settings/settings.service.js';
import { SsoSettings } from '@app/unraid-api/graph/resolvers/settings/sso-settings.model.js';
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/oidc-config.service.js';
import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/oidc-provider.model.js';
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/oidc-config.service.js';
import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';

@Resolver(() => Settings)
export class SettingsResolver {
Some files were not shown because too many files have changed in this diff.