mirror of
https://github.com/unraid/api.git
synced 2026-01-02 14:40:01 -06:00
Compare commits
126 Commits
v4.14.0
...
4.22.2-bui
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1d9ce0aa3d | ||
|
|
9714b21c5c | ||
|
|
44b4d77d80 | ||
|
|
3f5039c342 | ||
|
|
1d2c6701ce | ||
|
|
0ee09aefbb | ||
|
|
c60a51dc1b | ||
|
|
c4fbf698b4 | ||
|
|
00faa8f9d9 | ||
|
|
45d9d65c13 | ||
|
|
771014b005 | ||
|
|
31a255c928 | ||
|
|
167857a323 | ||
|
|
b80988aaab | ||
|
|
fe4a6451f1 | ||
|
|
9a86c615da | ||
|
|
25ff8992a5 | ||
|
|
45fb53d040 | ||
|
|
c855caa9b2 | ||
|
|
ba4a43aec8 | ||
|
|
c4ca761dfc | ||
|
|
01d353fa08 | ||
|
|
4a07953457 | ||
|
|
0b20e3ea9f | ||
|
|
3f4af09db5 | ||
|
|
222ced7518 | ||
|
|
03dae7ce66 | ||
|
|
0990b898bd | ||
|
|
95faeaa2f3 | ||
|
|
b49ef5a762 | ||
|
|
c782cf0e87 | ||
|
|
f95ca9c9cb | ||
|
|
a59b363ebc | ||
|
|
2fef10c94a | ||
|
|
1c73a4af42 | ||
|
|
88a924c84f | ||
|
|
ae4d3ecbc4 | ||
|
|
c569043ab5 | ||
|
|
50ea2a3ffb | ||
|
|
b518131406 | ||
|
|
e57d81e073 | ||
|
|
88baddd6c0 | ||
|
|
abc22bdb87 | ||
|
|
6ed2f5ce8e | ||
|
|
b79b44e95c | ||
|
|
ca22285a26 | ||
|
|
838be2c52e | ||
|
|
73c1100d0b | ||
|
|
434e331384 | ||
|
|
a27453fda8 | ||
|
|
98e6058cd8 | ||
|
|
6c2c51ae1d | ||
|
|
d10c12035e | ||
|
|
5dd6f42550 | ||
|
|
4759b3d0b3 | ||
|
|
daeeba8c1f | ||
|
|
196bd52628 | ||
|
|
6c0061923a | ||
|
|
f33afe7ae5 | ||
|
|
aecf70ffad | ||
|
|
785f1f5eb1 | ||
|
|
193be3df36 | ||
|
|
116ee88fcf | ||
|
|
413db4bd30 | ||
|
|
095c2221c9 | ||
|
|
dfe891ce38 | ||
|
|
797bf50ec7 | ||
|
|
af5ca11860 | ||
|
|
f0cffbdc7a | ||
|
|
16905dd3a6 | ||
|
|
2ecdb99052 | ||
|
|
286f1be8ed | ||
|
|
bcefdd5261 | ||
|
|
d3459ecbc6 | ||
|
|
534a07788b | ||
|
|
239cdd6133 | ||
|
|
77cfc07dda | ||
|
|
728b38ac11 | ||
|
|
44774d0acd | ||
|
|
e204eb80a0 | ||
|
|
0c727c37f4 | ||
|
|
292bc0fc81 | ||
|
|
53f501e1a7 | ||
|
|
6cf7c88242 | ||
|
|
33774aa596 | ||
|
|
88087d5201 | ||
|
|
5d89682a3f | ||
|
|
bc15bd3d70 | ||
|
|
7c3aee8f3f | ||
|
|
c7c3bb57ea | ||
|
|
99dbad57d5 | ||
|
|
c42f79d406 | ||
|
|
4d8588b173 | ||
|
|
0d1d27064e | ||
|
|
0fe2c2c1c8 | ||
|
|
a8e4119270 | ||
|
|
372a4ebb42 | ||
|
|
4e945f5f56 | ||
|
|
6356f9c41d | ||
|
|
a1ee915ca5 | ||
|
|
c147a6b507 | ||
|
|
9d42b36f74 | ||
|
|
26a95af953 | ||
|
|
0ead267838 | ||
|
|
163763f9e5 | ||
|
|
6469d002b7 | ||
|
|
ab11e7ff7f | ||
|
|
7316dc753f | ||
|
|
1bf74e9d6c | ||
|
|
9cd0d6ac65 | ||
|
|
f0348aa038 | ||
|
|
c1ab3a4746 | ||
|
|
7d67a40433 | ||
|
|
674323fd87 | ||
|
|
6947b5d4af | ||
|
|
c4cc54923c | ||
|
|
c508366702 | ||
|
|
9df6a3f5eb | ||
|
|
aa588883cc | ||
|
|
b2e7801238 | ||
|
|
fd895cacf0 | ||
|
|
6edd3a3d16 | ||
|
|
ac198d5d1a | ||
|
|
f1c043fe5f | ||
|
|
d0c66020e1 | ||
|
|
335f949b53 |
@@ -1,123 +1,3 @@
|
||||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"# Development Commands",
|
||||
"Bash(pnpm install)",
|
||||
"Bash(pnpm dev)",
|
||||
"Bash(pnpm build)",
|
||||
"Bash(pnpm test)",
|
||||
"Bash(pnpm test:*)",
|
||||
"Bash(pnpm lint)",
|
||||
"Bash(pnpm lint:fix)",
|
||||
"Bash(pnpm type-check)",
|
||||
"Bash(pnpm codegen)",
|
||||
"Bash(pnpm storybook)",
|
||||
"Bash(pnpm --filter * dev)",
|
||||
"Bash(pnpm --filter * build)",
|
||||
"Bash(pnpm --filter * test)",
|
||||
"Bash(pnpm --filter * lint)",
|
||||
"Bash(pnpm --filter * codegen)",
|
||||
|
||||
"# Git Commands (read-only)",
|
||||
"Bash(git status)",
|
||||
"Bash(git diff)",
|
||||
"Bash(git log)",
|
||||
"Bash(git branch)",
|
||||
"Bash(git remote -v)",
|
||||
|
||||
"# Search Commands",
|
||||
"Bash(rg *)",
|
||||
|
||||
"# File System (read-only)",
|
||||
"Bash(ls)",
|
||||
"Bash(ls -la)",
|
||||
"Bash(pwd)",
|
||||
"Bash(find . -name)",
|
||||
"Bash(find . -type)",
|
||||
|
||||
"# Node/NPM Commands",
|
||||
"Bash(node --version)",
|
||||
"Bash(pnpm --version)",
|
||||
"Bash(npx --version)",
|
||||
|
||||
"# Environment Commands",
|
||||
"Bash(echo $*)",
|
||||
"Bash(which *)",
|
||||
|
||||
"# Process Commands",
|
||||
"Bash(ps aux | grep)",
|
||||
"Bash(lsof -i)",
|
||||
|
||||
"# Documentation Domains",
|
||||
"WebFetch(domain:tailwindcss.com)",
|
||||
"WebFetch(domain:github.com)",
|
||||
"WebFetch(domain:reka-ui.com)",
|
||||
"WebFetch(domain:nodejs.org)",
|
||||
"WebFetch(domain:pnpm.io)",
|
||||
"WebFetch(domain:vitejs.dev)",
|
||||
"WebFetch(domain:nuxt.com)",
|
||||
"WebFetch(domain:nestjs.com)",
|
||||
|
||||
"# IDE Integration",
|
||||
"mcp__ide__getDiagnostics",
|
||||
|
||||
"# Browser MCP (for testing)",
|
||||
"mcp__browsermcp__browser_navigate",
|
||||
"mcp__browsermcp__browser_click",
|
||||
"mcp__browsermcp__browser_screenshot"
|
||||
],
|
||||
"deny": [
|
||||
"# Dangerous Commands",
|
||||
"Bash(rm -rf)",
|
||||
"Bash(chmod 777)",
|
||||
"Bash(curl)",
|
||||
"Bash(wget)",
|
||||
"Bash(ssh)",
|
||||
"Bash(scp)",
|
||||
"Bash(sudo)",
|
||||
"Bash(su)",
|
||||
"Bash(pkill)",
|
||||
"Bash(kill)",
|
||||
"Bash(killall)",
|
||||
"Bash(python)",
|
||||
"Bash(python3)",
|
||||
"Bash(pip)",
|
||||
"Bash(npm)",
|
||||
"Bash(yarn)",
|
||||
"Bash(apt)",
|
||||
"Bash(brew)",
|
||||
"Bash(systemctl)",
|
||||
"Bash(service)",
|
||||
"Bash(docker)",
|
||||
"Bash(docker-compose)",
|
||||
|
||||
"# File Modification (use Edit/Write tools instead)",
|
||||
"Bash(sed)",
|
||||
"Bash(awk)",
|
||||
"Bash(perl)",
|
||||
"Bash(echo > *)",
|
||||
"Bash(echo >> *)",
|
||||
"Bash(cat > *)",
|
||||
"Bash(cat >> *)",
|
||||
"Bash(tee)",
|
||||
|
||||
"# Git Write Commands (require explicit user action)",
|
||||
"Bash(git add)",
|
||||
"Bash(git commit)",
|
||||
"Bash(git push)",
|
||||
"Bash(git pull)",
|
||||
"Bash(git merge)",
|
||||
"Bash(git rebase)",
|
||||
"Bash(git checkout)",
|
||||
"Bash(git reset)",
|
||||
"Bash(git clean)",
|
||||
|
||||
"# Package Management Write Commands",
|
||||
"Bash(pnpm add)",
|
||||
"Bash(pnpm remove)",
|
||||
"Bash(pnpm update)",
|
||||
"Bash(pnpm upgrade)"
|
||||
]
|
||||
},
|
||||
"enableAllProjectMcpServers": false
|
||||
"permissions": {}
|
||||
}
|
||||
68
.github/workflows/build-plugin.yml
vendored
68
.github/workflows/build-plugin.yml
vendored
@@ -36,6 +36,8 @@ on:
|
||||
required: true
|
||||
CF_ENDPOINT:
|
||||
required: true
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN:
|
||||
required: false
|
||||
jobs:
|
||||
build-plugin:
|
||||
name: Build and Deploy Plugin
|
||||
@@ -49,21 +51,16 @@ jobs:
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Get pnpm store directory
|
||||
id: pnpm-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Get API Version
|
||||
id: vars
|
||||
@@ -74,14 +71,6 @@ jobs:
|
||||
API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
|
||||
echo "API_VERSION=${API_VERSION}" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v4
|
||||
name: Setup pnpm cache
|
||||
with:
|
||||
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
cd ${{ github.workspace }}
|
||||
@@ -97,7 +86,7 @@ jobs:
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
pattern: unraid-wc-rich
|
||||
path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/nuxt
|
||||
path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/standalone
|
||||
merge-multiple: true
|
||||
- name: Download Unraid API
|
||||
uses: actions/download-artifact@v5
|
||||
@@ -151,8 +140,8 @@ jobs:
|
||||
uses: the-actions-org/workflow-dispatch@v4.0.0
|
||||
with:
|
||||
workflow: release-production.yml
|
||||
inputs: '{ "version": "${{ steps.vars.outputs.API_VERSION }}" }'
|
||||
token: ${{ secrets.WORKFLOW_TRIGGER_PAT }}
|
||||
inputs: '{ "version": "v${{ steps.vars.outputs.API_VERSION }}" }'
|
||||
token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
- name: Upload to Cloudflare
|
||||
if: inputs.RELEASE_CREATED == 'false'
|
||||
@@ -181,3 +170,40 @@ jobs:
|
||||
```
|
||||
${{ inputs.BASE_URL }}/tag/${{ inputs.TAG }}/dynamix.unraid.net.plg
|
||||
```
|
||||
|
||||
- name: Clean up old preview builds
|
||||
if: inputs.RELEASE_CREATED == 'false' && github.event_name == 'push'
|
||||
continue-on-error: true
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
AWS_DEFAULT_REGION: auto
|
||||
run: |
|
||||
echo "🧹 Cleaning up old preview builds (keeping last 7 days)..."
|
||||
|
||||
# Calculate cutoff date (7 days ago)
|
||||
CUTOFF_DATE=$(date -d "7 days ago" +"%Y.%m.%d")
|
||||
echo "Deleting builds older than: ${CUTOFF_DATE}"
|
||||
|
||||
# List and delete old timestamped .txz files
|
||||
OLD_FILES=$(aws s3 ls "s3://${{ secrets.CF_BUCKET_PREVIEW }}/unraid-api/" \
|
||||
--endpoint-url ${{ secrets.CF_ENDPOINT }} --recursive | \
|
||||
grep -E "dynamix\.unraid\.net-[0-9]{4}\.[0-9]{2}\.[0-9]{2}\.[0-9]{4}\.txz" | \
|
||||
awk '{print $4}' || true)
|
||||
|
||||
DELETED_COUNT=0
|
||||
if [ -n "$OLD_FILES" ]; then
|
||||
while IFS= read -r file; do
|
||||
if [[ $file =~ ([0-9]{4}\.[0-9]{2}\.[0-9]{2})\.[0-9]{4}\.txz ]]; then
|
||||
FILE_DATE="${BASH_REMATCH[1]}"
|
||||
if [[ "$FILE_DATE" < "$CUTOFF_DATE" ]]; then
|
||||
echo "Deleting old build: $(basename "$file")"
|
||||
aws s3 rm "s3://${{ secrets.CF_BUCKET_PREVIEW }}/${file}" \
|
||||
--endpoint-url ${{ secrets.CF_ENDPOINT }} || true
|
||||
((DELETED_COUNT++))
|
||||
fi
|
||||
fi
|
||||
done <<< "$OLD_FILES"
|
||||
fi
|
||||
|
||||
echo "✅ Deleted ${DELETED_COUNT} old builds"
|
||||
|
||||
13
.github/workflows/deploy-storybook.yml
vendored
13
.github/workflows/deploy-storybook.yml
vendored
@@ -22,16 +22,17 @@ jobs:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '22.18.0'
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
@@ -65,7 +66,7 @@ jobs:
|
||||
|
||||
- name: Comment PR with deployment URL
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: actions/github-script@v7
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
script: |
|
||||
github.rest.issues.createComment({
|
||||
|
||||
193
.github/workflows/main.yml
vendored
193
.github/workflows/main.yml
vendored
@@ -6,29 +6,15 @@ on:
|
||||
branches:
|
||||
- main
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
jobs:
|
||||
release-please:
|
||||
name: Release Please
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5
|
||||
# Only run release-please on pushes to main
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||
|
||||
- id: release
|
||||
uses: googleapis/release-please-action@v4
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||
outputs:
|
||||
releases_created: ${{ steps.release.outputs.releases_created || 'false' }}
|
||||
tag_name: ${{ steps.release.outputs.tag_name || '' }}
|
||||
test-api:
|
||||
name: Test API
|
||||
defaults:
|
||||
@@ -38,36 +24,25 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
|
||||
version: 1.0
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Get pnpm store directory
|
||||
id: pnpm-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v4
|
||||
name: Setup pnpm cache
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system php-cli
|
||||
version: 1.0
|
||||
|
||||
- name: PNPM Install
|
||||
run: pnpm install --frozen-lockfile
|
||||
@@ -117,42 +92,68 @@ jobs:
|
||||
# Verify libvirt is running using sudo to bypass group membership delays
|
||||
sudo virsh list --all || true
|
||||
|
||||
- uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: latest
|
||||
- name: Build UI Package First
|
||||
run: |
|
||||
echo "🔧 Building UI package for web tests dependency..."
|
||||
cd ../unraid-ui && pnpm run build
|
||||
|
||||
- name: Run Tests Concurrently
|
||||
run: |
|
||||
set -e
|
||||
|
||||
# Run all tests in parallel with labeled output
|
||||
# Run all tests in parallel with labeled output and coverage generation
|
||||
echo "🚀 Starting API coverage tests..."
|
||||
pnpm run coverage > api-test.log 2>&1 &
|
||||
API_PID=$!
|
||||
|
||||
echo "🚀 Starting Connect plugin tests..."
|
||||
(cd ../packages/unraid-api-plugin-connect && pnpm test) > connect-test.log 2>&1 &
|
||||
(cd ../packages/unraid-api-plugin-connect && pnpm test --coverage 2>/dev/null || pnpm test) > connect-test.log 2>&1 &
|
||||
CONNECT_PID=$!
|
||||
|
||||
echo "🚀 Starting Shared package tests..."
|
||||
(cd ../packages/unraid-shared && pnpm test) > shared-test.log 2>&1 &
|
||||
(cd ../packages/unraid-shared && pnpm test --coverage 2>/dev/null || pnpm test) > shared-test.log 2>&1 &
|
||||
SHARED_PID=$!
|
||||
|
||||
echo "🚀 Starting Web package coverage tests..."
|
||||
(cd ../web && (pnpm test --coverage || pnpm test)) > web-test.log 2>&1 &
|
||||
WEB_PID=$!
|
||||
|
||||
echo "🚀 Starting UI package coverage tests..."
|
||||
(cd ../unraid-ui && pnpm test --coverage 2>/dev/null || pnpm test) > ui-test.log 2>&1 &
|
||||
UI_PID=$!
|
||||
|
||||
echo "🚀 Starting Plugin tests..."
|
||||
(cd ../plugin && pnpm test) > plugin-test.log 2>&1 &
|
||||
PLUGIN_PID=$!
|
||||
|
||||
# Wait for all processes and capture exit codes
|
||||
wait $API_PID && echo "✅ API tests completed" || { echo "❌ API tests failed"; API_EXIT=1; }
|
||||
wait $CONNECT_PID && echo "✅ Connect tests completed" || { echo "❌ Connect tests failed"; CONNECT_EXIT=1; }
|
||||
wait $SHARED_PID && echo "✅ Shared tests completed" || { echo "❌ Shared tests failed"; SHARED_EXIT=1; }
|
||||
wait $WEB_PID && echo "✅ Web tests completed" || { echo "❌ Web tests failed"; WEB_EXIT=1; }
|
||||
wait $UI_PID && echo "✅ UI tests completed" || { echo "❌ UI tests failed"; UI_EXIT=1; }
|
||||
wait $PLUGIN_PID && echo "✅ Plugin tests completed" || { echo "❌ Plugin tests failed"; PLUGIN_EXIT=1; }
|
||||
|
||||
# Display all outputs
|
||||
echo "📋 API Test Results:" && cat api-test.log
|
||||
echo "📋 Connect Plugin Test Results:" && cat connect-test.log
|
||||
echo "📋 Shared Package Test Results:" && cat shared-test.log
|
||||
echo "📋 Web Package Test Results:" && cat web-test.log
|
||||
echo "📋 UI Package Test Results:" && cat ui-test.log
|
||||
echo "📋 Plugin Test Results:" && cat plugin-test.log
|
||||
|
||||
# Exit with error if any test failed
|
||||
if [[ ${API_EXIT:-0} -eq 1 || ${CONNECT_EXIT:-0} -eq 1 || ${SHARED_EXIT:-0} -eq 1 ]]; then
|
||||
if [[ ${API_EXIT:-0} -eq 1 || ${CONNECT_EXIT:-0} -eq 1 || ${SHARED_EXIT:-0} -eq 1 || ${WEB_EXIT:-0} -eq 1 || ${UI_EXIT:-0} -eq 1 || ${PLUGIN_EXIT:-0} -eq 1 ]]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Upload all coverage reports to Codecov
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
files: ./coverage/coverage-final.json,../web/coverage/coverage-final.json,../unraid-ui/coverage/coverage-final.json,../packages/unraid-api-plugin-connect/coverage/coverage-final.json,../packages/unraid-shared/coverage/coverage-final.json
|
||||
fail_ci_if_error: false
|
||||
|
||||
build-api:
|
||||
name: Build API
|
||||
runs-on: ubuntu-latest
|
||||
@@ -165,29 +166,16 @@ jobs:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Get pnpm store directory
|
||||
id: pnpm-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v4
|
||||
name: Setup pnpm cache
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
@@ -218,7 +206,7 @@ jobs:
|
||||
id: buildnumber
|
||||
uses: onyxmueller/build-tag-number@v1
|
||||
with:
|
||||
token: ${{secrets.github_token}}
|
||||
token: ${{secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN}}
|
||||
prefix: ${{steps.vars.outputs.PACKAGE_LOCK_VERSION}}
|
||||
|
||||
- name: Build
|
||||
@@ -242,29 +230,16 @@ jobs:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Get pnpm store directory
|
||||
id: pnpm-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v4
|
||||
name: Setup pnpm cache
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
@@ -307,31 +282,17 @@ jobs:
|
||||
echo VITE_CONNECT=${{ secrets.VITE_CONNECT }} >> .env
|
||||
echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
|
||||
echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env
|
||||
cat .env
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Get pnpm store directory
|
||||
id: pnpm-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v4
|
||||
name: Setup pnpm cache
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: PNPM Install
|
||||
run: |
|
||||
@@ -359,12 +320,34 @@ jobs:
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: unraid-wc-rich
|
||||
path: web/.nuxt/nuxt-custom-elements/dist/unraid-components
|
||||
path: web/dist
|
||||
|
||||
release-please:
|
||||
name: Release Please
|
||||
runs-on: ubuntu-latest
|
||||
# Only run on pushes to main AND after tests pass
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||
needs:
|
||||
- test-api
|
||||
- build-api
|
||||
- build-web
|
||||
- build-unraid-ui-webcomponents
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- id: release
|
||||
uses: googleapis/release-please-action@v4
|
||||
outputs:
|
||||
releases_created: ${{ steps.release.outputs.releases_created || 'false' }}
|
||||
tag_name: ${{ steps.release.outputs.tag_name || '' }}
|
||||
|
||||
build-plugin-staging-pr:
|
||||
name: Build and Deploy Plugin
|
||||
needs:
|
||||
- release-please
|
||||
- build-api
|
||||
- build-web
|
||||
- build-unraid-ui-webcomponents
|
||||
@@ -388,9 +371,6 @@ jobs:
|
||||
needs:
|
||||
- release-please
|
||||
- build-api
|
||||
- build-web
|
||||
- build-unraid-ui-webcomponents
|
||||
- test-api
|
||||
uses: ./.github/workflows/build-plugin.yml
|
||||
with:
|
||||
RELEASE_CREATED: true
|
||||
@@ -404,3 +384,4 @@ jobs:
|
||||
CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
CF_BUCKET_PREVIEW: ${{ secrets.CF_BUCKET_PREVIEW }}
|
||||
CF_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
100
.github/workflows/push-staging-pr-on-close.yml
vendored
100
.github/workflows/push-staging-pr-on-close.yml
vendored
@@ -1,4 +1,9 @@
|
||||
name: Push Staging Plugin on PR Close
|
||||
name: Replace PR Plugin with Staging Redirect on Merge
|
||||
|
||||
# This workflow runs when a PR is merged and replaces the PR-specific plugin
|
||||
# with a redirect version that points to the main staging URL.
|
||||
# This ensures users who installed the PR version will automatically
|
||||
# update to the staging version on their next update check.
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
@@ -17,18 +22,13 @@ on:
|
||||
default: true
|
||||
|
||||
jobs:
|
||||
push-staging:
|
||||
push-staging-redirect:
|
||||
if: (github.event_name == 'pull_request' && github.event.pull_request.merged == true) || (github.event_name == 'workflow_dispatch' && inputs.pr_merged == true)
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
steps:
|
||||
- name: Set Timezone
|
||||
uses: szenius/set-timezone@v2.0
|
||||
with:
|
||||
timezoneLinux: "America/Los_Angeles"
|
||||
|
||||
- name: Set PR number
|
||||
id: pr_number
|
||||
run: |
|
||||
@@ -45,11 +45,12 @@ jobs:
|
||||
name: unraid-plugin-.*
|
||||
path: connect-files
|
||||
pr: ${{ steps.pr_number.outputs.pr_number }}
|
||||
workflow: main.yml
|
||||
workflow_conclusion: success
|
||||
workflow_search: true
|
||||
search_artifacts: true
|
||||
if_no_artifact_found: fail
|
||||
|
||||
- name: Update Downloaded Staging Plugin to New Date
|
||||
- name: Update Downloaded Plugin to Redirect to Staging
|
||||
run: |
|
||||
# Find the .plg file in the downloaded artifact
|
||||
plgfile=$(find connect-files -name "*.plg" -type f | head -1)
|
||||
@@ -60,23 +61,82 @@ jobs:
|
||||
fi
|
||||
|
||||
echo "Found plugin file: $plgfile"
|
||||
version=$(date +"%Y.%m.%d.%H%M")
|
||||
sed -i -E "s#(<!ENTITY version \").*(\">)#\1${version}\2#g" "${plgfile}" || exit 1
|
||||
|
||||
# Get current version and bump it with current timestamp
|
||||
current_version=$(grep '<!ENTITY version' "${plgfile}" | sed -E 's/.*"(.*)".*/\1/')
|
||||
echo "Current version: ${current_version}"
|
||||
|
||||
# Create new version with current timestamp (ensures it's newer)
|
||||
new_version=$(date +"%Y.%m.%d.%H%M")
|
||||
echo "New redirect version: ${new_version}"
|
||||
|
||||
# Update version to trigger update
|
||||
sed -i -E "s#(<!ENTITY version \").*(\">)#\1${new_version}\2#g" "${plgfile}" || exit 1
|
||||
|
||||
# Change the plugin url to point to staging
|
||||
# Change the plugin url to point to staging - users will switch to staging on next update
|
||||
url="https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg"
|
||||
sed -i -E "s#(<!ENTITY plugin_url \").*?(\">)#\1${url}\2#g" "${plgfile}" || exit 1
|
||||
cat "${plgfile}"
|
||||
|
||||
echo "Modified plugin to redirect to: ${url}"
|
||||
echo "Version bumped from ${current_version} to ${new_version}"
|
||||
|
||||
mkdir -p pr-release
|
||||
mv "${plgfile}" pr-release/dynamix.unraid.net.plg
|
||||
|
||||
- name: Upload to Cloudflare
|
||||
uses: jakejarvis/s3-sync-action@v0.5.1
|
||||
- name: Clean up old PR artifacts from Cloudflare
|
||||
env:
|
||||
AWS_S3_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
|
||||
AWS_S3_BUCKET: ${{ secrets.CF_BUCKET_PREVIEW }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
AWS_REGION: "auto"
|
||||
SOURCE_DIR: pr-release
|
||||
DEST_DIR: unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}
|
||||
AWS_DEFAULT_REGION: auto
|
||||
run: |
|
||||
# Delete all existing files in the PR directory first (txz, plg, etc.)
|
||||
aws s3 rm s3://${{ secrets.CF_BUCKET_PREVIEW }}/unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}/ \
|
||||
--recursive \
|
||||
--endpoint-url ${{ secrets.CF_ENDPOINT }}
|
||||
|
||||
echo "✅ Cleaned up old PR artifacts"
|
||||
|
||||
- name: Upload PR Redirect Plugin to Cloudflare
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
AWS_DEFAULT_REGION: auto
|
||||
run: |
|
||||
# Upload only the redirect plugin file
|
||||
aws s3 cp pr-release/dynamix.unraid.net.plg \
|
||||
s3://${{ secrets.CF_BUCKET_PREVIEW }}/unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}/dynamix.unraid.net.plg \
|
||||
--endpoint-url ${{ secrets.CF_ENDPOINT }} \
|
||||
--content-encoding none \
|
||||
--acl public-read
|
||||
|
||||
echo "✅ Uploaded redirect plugin"
|
||||
|
||||
- name: Output redirect information
|
||||
run: |
|
||||
echo "✅ PR plugin replaced with staging redirect version"
|
||||
echo "PR URL remains: https://preview.dl.unraid.net/unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}/dynamix.unraid.net.plg"
|
||||
echo "Redirects users to staging: https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg"
|
||||
echo "Users updating from this PR version will automatically switch to staging"
|
||||
|
||||
- name: Comment on PR about staging redirect
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v3
|
||||
with:
|
||||
comment-tag: pr-closed-staging
|
||||
mode: recreate
|
||||
message: |
|
||||
## 🔄 PR Merged - Plugin Redirected to Staging
|
||||
|
||||
This PR has been merged and the preview plugin has been updated to redirect to the staging version.
|
||||
|
||||
**For users testing this PR:**
|
||||
- Your plugin will automatically update to the staging version on the next update check
|
||||
- The staging version includes all merged changes from this PR
|
||||
- No manual intervention required
|
||||
|
||||
**Staging URL:**
|
||||
```
|
||||
https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg
|
||||
```
|
||||
|
||||
Thank you for testing! 🚀
|
||||
|
||||
26
.github/workflows/release-production.yml
vendored
26
.github/workflows/release-production.yml
vendored
@@ -28,16 +28,16 @@ jobs:
|
||||
with:
|
||||
latest: true
|
||||
prerelease: false
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: '22.18.0'
|
||||
node-version: 22.19.0
|
||||
- run: |
|
||||
cat << 'EOF' > release-notes.txt
|
||||
${{ steps.release-info.outputs.body }}
|
||||
EOF
|
||||
- run: npm install html-escaper@2 xml2js
|
||||
- name: Update Plugin Changelog
|
||||
uses: actions/github-script@v7
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs');
|
||||
@@ -124,3 +124,23 @@ jobs:
|
||||
--no-guess-mime-type \
|
||||
--content-encoding none \
|
||||
--acl public-read
|
||||
|
||||
- name: Discord Webhook Notification
|
||||
uses: tsickert/discord-webhook@v7.0.0
|
||||
with:
|
||||
webhook-url: ${{ secrets.PUBLIC_DISCORD_RELEASE_ENDPOINT }}
|
||||
username: "Unraid API Bot"
|
||||
avatar-url: "https://craftassets.unraid.net/uploads/logos/un-mark-gradient.png"
|
||||
embed-title: "🚀 Unraid API ${{ inputs.version }} Released!"
|
||||
embed-url: "https://github.com/${{ github.repository }}/releases/tag/${{ inputs.version }}"
|
||||
embed-description: |
|
||||
A new version of Unraid API has been released!
|
||||
|
||||
**Version:** `${{ inputs.version }}`
|
||||
**Release Page:** [View on GitHub](https://github.com/${{ github.repository }}/releases/tag/${{ inputs.version }})
|
||||
|
||||
**📋 Changelog:**
|
||||
${{ steps.release-info.outputs.body }}
|
||||
embed-color: 16734296
|
||||
embed-footer-text: "Unraid API • Automated Release"
|
||||
embed-timestamp: true
|
||||
|
||||
71
.github/workflows/test-libvirt.yml
vendored
71
.github/workflows/test-libvirt.yml
vendored
@@ -1,71 +0,0 @@
|
||||
name: Test Libvirt
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "libvirt/**"
|
||||
pull_request:
|
||||
paths:
|
||||
- "libvirt/**"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./libvirt
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
submodules: recursive
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.13.6"
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: libvirt-dev
|
||||
version: 1.0
|
||||
|
||||
- name: Set Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10.14.0
|
||||
run_install: false
|
||||
|
||||
- name: Get pnpm store directory
|
||||
id: pnpm-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/cache@v4
|
||||
name: Setup pnpm cache
|
||||
with:
|
||||
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('libvirt/package.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: pnpm install
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build
|
||||
run: pnpm run build
|
||||
|
||||
- name: test
|
||||
run: pnpm run test
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -29,6 +29,10 @@ unraid-ui/node_modules/
|
||||
# TypeScript v1 declaration files
|
||||
typings/
|
||||
|
||||
# Auto-generated type declarations for Nuxt UI
|
||||
auto-imports.d.ts
|
||||
components.d.ts
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
@@ -118,3 +122,4 @@ api/dev/Unraid.net/myservers.cfg
|
||||
|
||||
# local Mise settings
|
||||
.mise.toml
|
||||
|
||||
|
||||
@@ -1 +1 @@
|
||||
{".":"4.14.0"}
|
||||
{".":"4.22.2"}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
@custom-variant dark (&:where(.dark, .dark *));
|
||||
|
||||
@layer utilities {
|
||||
:host {
|
||||
/* Utility defaults for web components (when we were using shadow DOM) */
|
||||
:host {
|
||||
--tw-divide-y-reverse: 0;
|
||||
--tw-border-style: solid;
|
||||
--tw-font-weight: initial;
|
||||
@@ -48,21 +48,20 @@
|
||||
--tw-drop-shadow: initial;
|
||||
--tw-duration: initial;
|
||||
--tw-ease: initial;
|
||||
}
|
||||
}
|
||||
|
||||
@layer base {
|
||||
*,
|
||||
::after,
|
||||
::before,
|
||||
::backdrop,
|
||||
::file-selector-button {
|
||||
border-color: hsl(var(--border));
|
||||
}
|
||||
/* Global border color - this is what's causing the issue! */
|
||||
/* Commenting out since it affects all elements globally
|
||||
*,
|
||||
::after,
|
||||
::before,
|
||||
::backdrop,
|
||||
::file-selector-button {
|
||||
border-color: hsl(var(--border));
|
||||
}
|
||||
*/
|
||||
|
||||
|
||||
|
||||
body {
|
||||
body {
|
||||
--color-alpha: #1c1b1b;
|
||||
--color-beta: #f2f2f2;
|
||||
--color-gamma: #999999;
|
||||
@@ -74,8 +73,24 @@
|
||||
--ring-shadow: 0 0 var(--color-beta);
|
||||
}
|
||||
|
||||
button:not(:disabled),
|
||||
[role='button']:not(:disabled) {
|
||||
cursor: pointer;
|
||||
}
|
||||
button:not(:disabled),
|
||||
[role='button']:not(:disabled) {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
/* Font size overrides for SSO button component */
|
||||
unraid-sso-button {
|
||||
--text-xs: 0.75rem;
|
||||
--text-sm: 0.875rem;
|
||||
--text-base: 1rem;
|
||||
--text-lg: 1.125rem;
|
||||
--text-xl: 1.25rem;
|
||||
--text-2xl: 1.5rem;
|
||||
--text-3xl: 1.875rem;
|
||||
--text-4xl: 2.25rem;
|
||||
--text-5xl: 3rem;
|
||||
--text-6xl: 3.75rem;
|
||||
--text-7xl: 4.5rem;
|
||||
--text-8xl: 6rem;
|
||||
--text-9xl: 8rem;
|
||||
}
|
||||
@@ -1,7 +1,61 @@
|
||||
/* Hybrid theme system: Native CSS + Theme Store fallback */
|
||||
@layer base {
|
||||
/* Light mode defaults */
|
||||
:root {
|
||||
|
||||
/* Light mode defaults */
|
||||
:root {
|
||||
/* Nuxt UI Color System - Primary (Orange for Unraid) */
|
||||
--ui-color-primary-50: #fff7ed;
|
||||
--ui-color-primary-100: #ffedd5;
|
||||
--ui-color-primary-200: #fed7aa;
|
||||
--ui-color-primary-300: #fdba74;
|
||||
--ui-color-primary-400: #fb923c;
|
||||
--ui-color-primary-500: #ff8c2f;
|
||||
--ui-color-primary-600: #ea580c;
|
||||
--ui-color-primary-700: #c2410c;
|
||||
--ui-color-primary-800: #9a3412;
|
||||
--ui-color-primary-900: #7c2d12;
|
||||
--ui-color-primary-950: #431407;
|
||||
|
||||
/* Nuxt UI Color System - Neutral (True Gray) */
|
||||
--ui-color-neutral-50: #fafafa;
|
||||
--ui-color-neutral-100: #f5f5f5;
|
||||
--ui-color-neutral-200: #e5e5e5;
|
||||
--ui-color-neutral-300: #d4d4d4;
|
||||
--ui-color-neutral-400: #a3a3a3;
|
||||
--ui-color-neutral-500: #737373;
|
||||
--ui-color-neutral-600: #525252;
|
||||
--ui-color-neutral-700: #404040;
|
||||
--ui-color-neutral-800: #262626;
|
||||
--ui-color-neutral-900: #171717;
|
||||
--ui-color-neutral-950: #0a0a0a;
|
||||
|
||||
/* Nuxt UI Default color shades */
|
||||
--ui-primary: var(--ui-color-primary-500);
|
||||
--ui-secondary: var(--ui-color-neutral-500);
|
||||
|
||||
/* Nuxt UI Design Tokens - Text */
|
||||
--ui-text-dimmed: var(--ui-color-neutral-400);
|
||||
--ui-text-muted: var(--ui-color-neutral-500);
|
||||
--ui-text-toned: var(--ui-color-neutral-600);
|
||||
--ui-text: var(--ui-color-neutral-700);
|
||||
--ui-text-highlighted: var(--ui-color-neutral-900);
|
||||
--ui-text-inverted: white;
|
||||
|
||||
/* Nuxt UI Design Tokens - Background */
|
||||
--ui-bg: white;
|
||||
--ui-bg-muted: var(--ui-color-neutral-50);
|
||||
--ui-bg-elevated: var(--ui-color-neutral-100);
|
||||
--ui-bg-accented: var(--ui-color-neutral-200);
|
||||
--ui-bg-inverted: var(--ui-color-neutral-900);
|
||||
|
||||
/* Nuxt UI Design Tokens - Border */
|
||||
--ui-border: var(--ui-color-neutral-200);
|
||||
--ui-border-muted: var(--ui-color-neutral-200);
|
||||
--ui-border-accented: var(--ui-color-neutral-300);
|
||||
--ui-border-inverted: var(--ui-color-neutral-900);
|
||||
|
||||
/* Nuxt UI Radius */
|
||||
--ui-radius: 0.5rem;
|
||||
|
||||
--background: 0 0% 100%;
|
||||
--foreground: 0 0% 3.9%;
|
||||
--muted: 0 0% 96.1%;
|
||||
@@ -12,7 +66,7 @@
|
||||
--card-foreground: 0 0% 3.9%;
|
||||
--border: 0 0% 89.8%;
|
||||
--input: 0 0% 89.8%;
|
||||
--primary: 0 0% 9%;
|
||||
--primary: 24 100% 50%; /* Orange #ff8c2f in HSL */
|
||||
--primary-foreground: 0 0% 98%;
|
||||
--secondary: 0 0% 96.1%;
|
||||
--secondary-foreground: 0 0% 9%;
|
||||
@@ -20,7 +74,7 @@
|
||||
--accent-foreground: 0 0% 9%;
|
||||
--destructive: 0 84.2% 60.2%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--ring: 0 0% 3.9%;
|
||||
--ring: 24 100% 50%; /* Orange ring to match primary */
|
||||
--chart-1: 12 76% 61%;
|
||||
--chart-2: 173 58% 39%;
|
||||
--chart-3: 197 37% 24%;
|
||||
@@ -30,6 +84,31 @@
|
||||
|
||||
/* Dark mode */
|
||||
.dark {
|
||||
/* Nuxt UI Default color shades - Dark mode */
|
||||
--ui-primary: var(--ui-color-primary-400);
|
||||
--ui-secondary: var(--ui-color-neutral-400);
|
||||
|
||||
/* Nuxt UI Design Tokens - Text (Dark) */
|
||||
--ui-text-dimmed: var(--ui-color-neutral-500);
|
||||
--ui-text-muted: var(--ui-color-neutral-400);
|
||||
--ui-text-toned: var(--ui-color-neutral-300);
|
||||
--ui-text: var(--ui-color-neutral-200);
|
||||
--ui-text-highlighted: white;
|
||||
--ui-text-inverted: var(--ui-color-neutral-900);
|
||||
|
||||
/* Nuxt UI Design Tokens - Background (Dark) */
|
||||
--ui-bg: var(--ui-color-neutral-900);
|
||||
--ui-bg-muted: var(--ui-color-neutral-800);
|
||||
--ui-bg-elevated: var(--ui-color-neutral-800);
|
||||
--ui-bg-accented: var(--ui-color-neutral-700);
|
||||
--ui-bg-inverted: white;
|
||||
|
||||
/* Nuxt UI Design Tokens - Border (Dark) */
|
||||
--ui-border: var(--ui-color-neutral-800);
|
||||
--ui-border-muted: var(--ui-color-neutral-700);
|
||||
--ui-border-accented: var(--ui-color-neutral-700);
|
||||
--ui-border-inverted: white;
|
||||
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--muted: 0 0% 14.9%;
|
||||
@@ -40,15 +119,15 @@
|
||||
--card-foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
--input: 0 0% 14.9%;
|
||||
--primary: 0 0% 98%;
|
||||
--primary-foreground: 0 0% 9%;
|
||||
--primary: 24 100% 50%; /* Orange #ff8c2f in HSL */
|
||||
--primary-foreground: 0 0% 98%;
|
||||
--secondary: 0 0% 14.9%;
|
||||
--secondary-foreground: 0 0% 98%;
|
||||
--accent: 0 0% 14.9%;
|
||||
--accent-foreground: 0 0% 98%;
|
||||
--destructive: 0 62.8% 30.6%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--ring: 0 0% 83.1%;
|
||||
--ring: 24 100% 50%; /* Orange ring to match primary */
|
||||
--chart-1: 220 70% 50%;
|
||||
--chart-2: 160 60% 45%;
|
||||
--chart-3: 30 80% 55%;
|
||||
@@ -62,69 +141,4 @@
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
}
|
||||
|
||||
/* For web components: inherit CSS variables from the host */
|
||||
:host {
|
||||
--background: inherit;
|
||||
--foreground: inherit;
|
||||
--muted: inherit;
|
||||
--muted-foreground: inherit;
|
||||
--popover: inherit;
|
||||
--popover-foreground: inherit;
|
||||
--card: inherit;
|
||||
--card-foreground: inherit;
|
||||
--border: inherit;
|
||||
--input: inherit;
|
||||
--primary: inherit;
|
||||
--primary-foreground: inherit;
|
||||
--secondary: inherit;
|
||||
--secondary-foreground: inherit;
|
||||
--accent: inherit;
|
||||
--accent-foreground: inherit;
|
||||
--destructive: inherit;
|
||||
--destructive-foreground: inherit;
|
||||
--ring: inherit;
|
||||
--chart-1: inherit;
|
||||
--chart-2: inherit;
|
||||
--chart-3: inherit;
|
||||
--chart-4: inherit;
|
||||
--chart-5: inherit;
|
||||
}
|
||||
|
||||
/* Class-based dark mode support for web components using :host-context */
|
||||
:host-context(.dark) {
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--muted: 0 0% 14.9%;
|
||||
--muted-foreground: 0 0% 63.9%;
|
||||
--popover: 0 0% 3.9%;
|
||||
--popover-foreground: 0 0% 98%;
|
||||
--card: 0 0% 3.9%;
|
||||
--card-foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
--input: 0 0% 14.9%;
|
||||
--primary: 0 0% 98%;
|
||||
--primary-foreground: 0 0% 9%;
|
||||
--secondary: 0 0% 14.9%;
|
||||
--secondary-foreground: 0 0% 98%;
|
||||
--accent: 0 0% 14.9%;
|
||||
--accent-foreground: 0 0% 98%;
|
||||
--destructive: 0 62.8% 30.6%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--ring: 0 0% 83.1%;
|
||||
--chart-1: 220 70% 50%;
|
||||
--chart-2: 160 60% 45%;
|
||||
--chart-3: 30 80% 55%;
|
||||
--chart-4: 280 65% 60%;
|
||||
--chart-5: 340 75% 55%;
|
||||
}
|
||||
|
||||
/* Alternative class-based dark mode support for specific Unraid themes */
|
||||
:host-context(.dark[data-theme='black']),
|
||||
:host-context(.dark[data-theme='gray']) {
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
/* Tailwind Shared Styles - Single entry point for all shared CSS */
|
||||
@import './css-variables.css';
|
||||
@import './unraid-theme.css';
|
||||
@import './theme-variants.css';
|
||||
@import './base-utilities.css';
|
||||
@import './sonner.css';
|
||||
@@ -1,665 +0,0 @@
|
||||
/**------------------------------------------------------------------------------------------------
|
||||
* SONNER.CSS
|
||||
* This is a copy of Sonner's `style.css` as of commit a5b77c2df08d5c05aa923170176168102855533d
|
||||
*
|
||||
* This was necessary because I couldn't find a simple way to include Sonner's styles in vite's
|
||||
* css build output. They wouldn't show up even though the toaster was included, and vue-sonner
|
||||
* currently doesn't export its stylesheet (it appears to be inlined, but styles weren't applied
|
||||
* to the unraid-toaster component for some reason).
|
||||
*------------------------------------------------------------------------------------------------**/
|
||||
:where(html[dir='ltr']),
|
||||
:where([data-sonner-toaster][dir='ltr']) {
|
||||
--toast-icon-margin-start: -3px;
|
||||
--toast-icon-margin-end: 4px;
|
||||
--toast-svg-margin-start: -1px;
|
||||
--toast-svg-margin-end: 0px;
|
||||
--toast-button-margin-start: auto;
|
||||
--toast-button-margin-end: 0;
|
||||
--toast-close-button-start: 0;
|
||||
--toast-close-button-end: unset;
|
||||
--toast-close-button-transform: translate(-35%, -35%);
|
||||
}
|
||||
|
||||
:where(html[dir='rtl']),
|
||||
:where([data-sonner-toaster][dir='rtl']) {
|
||||
--toast-icon-margin-start: 4px;
|
||||
--toast-icon-margin-end: -3px;
|
||||
--toast-svg-margin-start: 0px;
|
||||
--toast-svg-margin-end: -1px;
|
||||
--toast-button-margin-start: 0;
|
||||
--toast-button-margin-end: auto;
|
||||
--toast-close-button-start: unset;
|
||||
--toast-close-button-end: 0;
|
||||
--toast-close-button-transform: translate(35%, -35%);
|
||||
}
|
||||
|
||||
:where([data-sonner-toaster]) {
|
||||
position: fixed;
|
||||
width: var(--width);
|
||||
font-family: ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, Helvetica Neue, Arial,
|
||||
Noto Sans, sans-serif, Apple Color Emoji, Segoe UI Emoji, Segoe UI Symbol, Noto Color Emoji;
|
||||
--gray1: hsl(0, 0%, 99%);
|
||||
--gray2: hsl(0, 0%, 97.3%);
|
||||
--gray3: hsl(0, 0%, 95.1%);
|
||||
--gray4: hsl(0, 0%, 93%);
|
||||
--gray5: hsl(0, 0%, 90.9%);
|
||||
--gray6: hsl(0, 0%, 88.7%);
|
||||
--gray7: hsl(0, 0%, 85.8%);
|
||||
--gray8: hsl(0, 0%, 78%);
|
||||
--gray9: hsl(0, 0%, 56.1%);
|
||||
--gray10: hsl(0, 0%, 52.3%);
|
||||
--gray11: hsl(0, 0%, 43.5%);
|
||||
--gray12: hsl(0, 0%, 9%);
|
||||
--border-radius: 8px;
|
||||
box-sizing: border-box;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
list-style: none;
|
||||
outline: none;
|
||||
z-index: 999999999;
|
||||
transition: transform 400ms ease;
|
||||
}
|
||||
|
||||
:where([data-sonner-toaster][data-lifted='true']) {
|
||||
transform: translateY(-10px);
|
||||
}
|
||||
|
||||
@media (hover: none) and (pointer: coarse) {
|
||||
:where([data-sonner-toaster][data-lifted='true']) {
|
||||
transform: none;
|
||||
}
|
||||
}
|
||||
|
||||
:where([data-sonner-toaster][data-x-position='right']) {
|
||||
right: max(var(--offset), env(safe-area-inset-right));
|
||||
}
|
||||
|
||||
:where([data-sonner-toaster][data-x-position='left']) {
|
||||
left: max(var(--offset), env(safe-area-inset-left));
|
||||
}
|
||||
|
||||
:where([data-sonner-toaster][data-x-position='center']) {
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
}
|
||||
|
||||
:where([data-sonner-toaster][data-y-position='top']) {
|
||||
top: max(var(--offset), env(safe-area-inset-top));
|
||||
}
|
||||
|
||||
:where([data-sonner-toaster][data-y-position='bottom']) {
|
||||
bottom: max(var(--offset), env(safe-area-inset-bottom));
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) {
|
||||
--y: translateY(100%);
|
||||
--lift-amount: calc(var(--lift) * var(--gap));
|
||||
z-index: var(--z-index);
|
||||
position: absolute;
|
||||
opacity: 0;
|
||||
transform: var(--y);
|
||||
filter: blur(0);
|
||||
/* https://stackoverflow.com/questions/48124372/pointermove-event-not-working-with-touch-why-not */
|
||||
touch-action: none;
|
||||
transition: transform 400ms, opacity 400ms, height 400ms, box-shadow 200ms;
|
||||
box-sizing: border-box;
|
||||
outline: none;
|
||||
overflow-wrap: anywhere;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-styled='true']) {
|
||||
padding: 16px;
|
||||
background: var(--normal-bg);
|
||||
border: 1px solid var(--normal-border);
|
||||
color: var(--normal-text);
|
||||
border-radius: var(--border-radius);
|
||||
box-shadow: 0px 4px 12px rgba(0, 0, 0, 0.1);
|
||||
width: var(--width);
|
||||
font-size: 13px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]:focus-visible) {
|
||||
box-shadow: 0px 4px 12px rgba(0, 0, 0, 0.1), 0 0 0 2px rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-y-position='top']) {
|
||||
top: 0;
|
||||
--y: translateY(-100%);
|
||||
--lift: 1;
|
||||
--lift-amount: calc(1 * var(--gap));
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-y-position='bottom']) {
|
||||
bottom: 0;
|
||||
--y: translateY(100%);
|
||||
--lift: -1;
|
||||
--lift-amount: calc(var(--lift) * var(--gap));
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-description]) {
|
||||
font-weight: 400;
|
||||
line-height: 1.4;
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-title]) {
|
||||
font-weight: 500;
|
||||
line-height: 1.5;
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-icon]) {
|
||||
display: flex;
|
||||
height: 16px;
|
||||
width: 16px;
|
||||
position: relative;
|
||||
justify-content: flex-start;
|
||||
align-items: center;
|
||||
flex-shrink: 0;
|
||||
margin-left: var(--toast-icon-margin-start);
|
||||
margin-right: var(--toast-icon-margin-end);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-promise='true']) :where([data-icon]) > svg {
|
||||
opacity: 0;
|
||||
transform: scale(0.8);
|
||||
transform-origin: center;
|
||||
animation: sonner-fade-in 300ms ease forwards;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-icon]) > * {
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-icon]) svg {
|
||||
margin-left: var(--toast-svg-margin-start);
|
||||
margin-right: var(--toast-svg-margin-end);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-content]) {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 2px;
|
||||
}
|
||||
|
||||
[data-sonner-toast][data-styled='true'] [data-button] {
|
||||
border-radius: 4px;
|
||||
padding-left: 8px;
|
||||
padding-right: 8px;
|
||||
height: 24px;
|
||||
font-size: 12px;
|
||||
color: var(--normal-bg);
|
||||
background: var(--normal-text);
|
||||
margin-left: var(--toast-button-margin-start);
|
||||
margin-right: var(--toast-button-margin-end);
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
outline: none;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
flex-shrink: 0;
|
||||
transition: opacity 400ms, box-shadow 200ms;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-button]):focus-visible {
|
||||
box-shadow: 0 0 0 2px rgba(0, 0, 0, 0.4);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-button]):first-of-type {
|
||||
margin-left: var(--toast-button-margin-start);
|
||||
margin-right: var(--toast-button-margin-end);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-cancel]) {
|
||||
color: var(--normal-text);
|
||||
background: rgba(0, 0, 0, 0.08);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-theme='dark']) :where([data-cancel]) {
|
||||
background: rgba(255, 255, 255, 0.3);
|
||||
}
|
||||
|
||||
[data-sonner-toast] [data-close-button] {
|
||||
position: absolute;
|
||||
left: var(--toast-close-button-start);
|
||||
right: var(--toast-close-button-end);
|
||||
top: 0;
|
||||
height: 20px;
|
||||
width: 20px;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
padding: 0;
|
||||
color: var(--gray12);
|
||||
border: 1px solid var(--gray4);
|
||||
transform: var(--toast-close-button-transform);
|
||||
border-radius: 50%;
|
||||
cursor: pointer;
|
||||
z-index: 1;
|
||||
transition: opacity 100ms, background 200ms, border-color 200ms;
|
||||
}
|
||||
|
||||
[data-sonner-toast] [data-close-button] {
|
||||
background: var(--gray1);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-close-button]):focus-visible {
|
||||
box-shadow: 0px 4px 12px rgba(0, 0, 0, 0.1), 0 0 0 2px rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-disabled='true']) {
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
[data-sonner-toast]:hover [data-close-button]:hover {
|
||||
background: var(--gray2);
|
||||
border-color: var(--gray5);
|
||||
}
|
||||
|
||||
/* Leave a ghost div to avoid setting hover to false when swiping out */
|
||||
:where([data-sonner-toast][data-swiping='true'])::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
height: 100%;
|
||||
z-index: -1;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-y-position='top'][data-swiping='true'])::before {
|
||||
/* y 50% needed to distribute height additional height evenly */
|
||||
bottom: 50%;
|
||||
transform: scaleY(3) translateY(50%);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-y-position='bottom'][data-swiping='true'])::before {
|
||||
/* y -50% needed to distribute height additional height evenly */
|
||||
top: 50%;
|
||||
transform: scaleY(3) translateY(-50%);
|
||||
}
|
||||
|
||||
/* Leave a ghost div to avoid setting hover to false when transitioning out */
|
||||
:where([data-sonner-toast][data-swiping='false'][data-removed='true'])::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
inset: 0;
|
||||
transform: scaleY(2);
|
||||
}
|
||||
|
||||
/* Needed to avoid setting hover to false when inbetween toasts */
|
||||
:where([data-sonner-toast])::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
left: 0;
|
||||
height: calc(var(--gap) + 1px);
|
||||
bottom: 100%;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-mounted='true']) {
|
||||
--y: translateY(0);
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-expanded='false'][data-front='false']) {
|
||||
--scale: var(--toasts-before) * 0.05 + 1;
|
||||
--y: translateY(calc(var(--lift-amount) * var(--toasts-before))) scale(calc(-1 * var(--scale)));
|
||||
height: var(--front-toast-height);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) > * {
|
||||
transition: opacity 400ms;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-expanded='false'][data-front='false'][data-styled='true']) > * {
|
||||
opacity: 0;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-visible='false']) {
|
||||
opacity: 0;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-mounted='true'][data-expanded='true']) {
|
||||
--y: translateY(calc(var(--lift) * var(--offset)));
|
||||
height: var(--initial-height);
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-removed='true'][data-front='true'][data-swipe-out='false']) {
|
||||
--y: translateY(calc(var(--lift) * -100%));
|
||||
opacity: 0;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-removed='true'][data-front='false'][data-swipe-out='false'][data-expanded='true']) {
|
||||
--y: translateY(calc(var(--lift) * var(--offset) + var(--lift) * -100%));
|
||||
opacity: 0;
|
||||
}
|
||||
|
||||
:where([data-sonner-toast][data-removed='true'][data-front='false'][data-swipe-out='false'][data-expanded='false']) {
|
||||
--y: translateY(40%);
|
||||
opacity: 0;
|
||||
transition: transform 500ms, opacity 200ms;
|
||||
}
|
||||
|
||||
/* Bump up the height to make sure hover state doesn't get set to false */
|
||||
:where([data-sonner-toast][data-removed='true'][data-front='false'])::before {
|
||||
height: calc(var(--initial-height) + 20%);
|
||||
}
|
||||
|
||||
[data-sonner-toast][data-swiping='true'] {
|
||||
transform: var(--y) translateY(var(--swipe-amount, 0px));
|
||||
transition: none;
|
||||
}
|
||||
|
||||
[data-sonner-toast][data-swiped='true'] {
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
[data-sonner-toast][data-swipe-out='true'][data-y-position='bottom'],
|
||||
[data-sonner-toast][data-swipe-out='true'][data-y-position='top'] {
|
||||
animation: swipe-out 200ms ease-out forwards;
|
||||
}
|
||||
|
||||
@keyframes swipe-out {
|
||||
from {
|
||||
transform: translateY(calc(var(--lift) * var(--offset) + var(--swipe-amount)));
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
to {
|
||||
transform: translateY(calc(var(--lift) * var(--offset) + var(--swipe-amount) + var(--lift) * -100%));
|
||||
opacity: 0;
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 600px) {
|
||||
[data-sonner-toaster] {
|
||||
position: fixed;
|
||||
--mobile-offset: 16px;
|
||||
right: var(--mobile-offset);
|
||||
left: var(--mobile-offset);
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
[data-sonner-toaster][dir='rtl'] {
|
||||
left: calc(var(--mobile-offset) * -1);
|
||||
}
|
||||
|
||||
[data-sonner-toaster] [data-sonner-toast] {
|
||||
left: 0;
|
||||
right: 0;
|
||||
width: calc(100% - var(--mobile-offset) * 2);
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-x-position='left'] {
|
||||
left: var(--mobile-offset);
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-y-position='bottom'] {
|
||||
bottom: 20px;
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-y-position='top'] {
|
||||
top: 20px;
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-x-position='center'] {
|
||||
left: var(--mobile-offset);
|
||||
right: var(--mobile-offset);
|
||||
transform: none;
|
||||
}
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-theme='light'] {
|
||||
--normal-bg: #fff;
|
||||
--normal-border: var(--gray4);
|
||||
--normal-text: var(--gray12);
|
||||
|
||||
--success-bg: hsl(143, 85%, 96%);
|
||||
--success-border: hsl(145, 92%, 91%);
|
||||
--success-text: hsl(140, 100%, 27%);
|
||||
|
||||
--info-bg: hsl(208, 100%, 97%);
|
||||
--info-border: hsl(221, 91%, 91%);
|
||||
--info-text: hsl(210, 92%, 45%);
|
||||
|
||||
--warning-bg: hsl(49, 100%, 97%);
|
||||
--warning-border: hsl(49, 91%, 91%);
|
||||
--warning-text: hsl(31, 92%, 45%);
|
||||
|
||||
--error-bg: hsl(359, 100%, 97%);
|
||||
--error-border: hsl(359, 100%, 94%);
|
||||
--error-text: hsl(360, 100%, 45%);
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-theme='light'] [data-sonner-toast][data-invert='true'] {
|
||||
--normal-bg: #000;
|
||||
--normal-border: hsl(0, 0%, 20%);
|
||||
--normal-text: var(--gray1);
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-theme='dark'] [data-sonner-toast][data-invert='true'] {
|
||||
--normal-bg: #fff;
|
||||
--normal-border: var(--gray3);
|
||||
--normal-text: var(--gray12);
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-theme='dark'] {
|
||||
--normal-bg: #000;
|
||||
--normal-border: hsl(0, 0%, 20%);
|
||||
--normal-text: var(--gray1);
|
||||
|
||||
--success-bg: hsl(150, 100%, 6%);
|
||||
--success-border: hsl(147, 100%, 12%);
|
||||
--success-text: hsl(150, 86%, 65%);
|
||||
|
||||
--info-bg: hsl(215, 100%, 6%);
|
||||
--info-border: hsl(223, 100%, 12%);
|
||||
--info-text: hsl(216, 87%, 65%);
|
||||
|
||||
--warning-bg: hsl(64, 100%, 6%);
|
||||
--warning-border: hsl(60, 100%, 12%);
|
||||
--warning-text: hsl(46, 87%, 65%);
|
||||
|
||||
--error-bg: hsl(358, 76%, 10%);
|
||||
--error-border: hsl(357, 89%, 16%);
|
||||
--error-text: hsl(358, 100%, 81%);
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='success'] {
|
||||
background: var(--success-bg);
|
||||
border-color: var(--success-border);
|
||||
color: var(--success-text);
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='success'] [data-close-button] {
|
||||
background: var(--success-bg);
|
||||
border-color: var(--success-border);
|
||||
color: var(--success-text);
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='info'] {
|
||||
background: var(--info-bg);
|
||||
border-color: var(--info-border);
|
||||
color: var(--info-text);
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='info'] [data-close-button] {
|
||||
background: var(--info-bg);
|
||||
border-color: var(--info-border);
|
||||
color: var(--info-text);
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='warning'] {
|
||||
background: var(--warning-bg);
|
||||
border-color: var(--warning-border);
|
||||
color: var(--warning-text);
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='warning'] [data-close-button] {
|
||||
background: var(--warning-bg);
|
||||
border-color: var(--warning-border);
|
||||
color: var(--warning-text);
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='error'] {
|
||||
background: var(--error-bg);
|
||||
border-color: var(--error-border);
|
||||
color: var(--error-text);
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='error'] [data-close-button] {
|
||||
background: var(--error-bg);
|
||||
border-color: var(--error-border);
|
||||
color: var(--error-text);
|
||||
}
|
||||
|
||||
.sonner-loading-wrapper {
|
||||
--size: 16px;
|
||||
height: var(--size);
|
||||
width: var(--size);
|
||||
position: absolute;
|
||||
inset: 0;
|
||||
z-index: 10;
|
||||
}
|
||||
|
||||
.sonner-loading-wrapper[data-visible='false'] {
|
||||
transform-origin: center;
|
||||
animation: sonner-fade-out 0.2s ease forwards;
|
||||
}
|
||||
|
||||
.sonner-spinner {
|
||||
position: relative;
|
||||
top: 50%;
|
||||
left: 50%;
|
||||
height: var(--size);
|
||||
width: var(--size);
|
||||
}
|
||||
|
||||
.sonner-loading-bar {
|
||||
animation: sonner-spin 1.2s linear infinite;
|
||||
background: var(--gray11);
|
||||
border-radius: 6px;
|
||||
height: 8%;
|
||||
left: -10%;
|
||||
position: absolute;
|
||||
top: -3.9%;
|
||||
width: 24%;
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(1) {
|
||||
animation-delay: -1.2s;
|
||||
transform: rotate(0.0001deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(2) {
|
||||
animation-delay: -1.1s;
|
||||
transform: rotate(30deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(3) {
|
||||
animation-delay: -1s;
|
||||
transform: rotate(60deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(4) {
|
||||
animation-delay: -0.9s;
|
||||
transform: rotate(90deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(5) {
|
||||
animation-delay: -0.8s;
|
||||
transform: rotate(120deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(6) {
|
||||
animation-delay: -0.7s;
|
||||
transform: rotate(150deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(7) {
|
||||
animation-delay: -0.6s;
|
||||
transform: rotate(180deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(8) {
|
||||
animation-delay: -0.5s;
|
||||
transform: rotate(210deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(9) {
|
||||
animation-delay: -0.4s;
|
||||
transform: rotate(240deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(10) {
|
||||
animation-delay: -0.3s;
|
||||
transform: rotate(270deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(11) {
|
||||
animation-delay: -0.2s;
|
||||
transform: rotate(300deg) translate(146%);
|
||||
}
|
||||
|
||||
.sonner-loading-bar:nth-child(12) {
|
||||
animation-delay: -0.1s;
|
||||
transform: rotate(330deg) translate(146%);
|
||||
}
|
||||
|
||||
@keyframes sonner-fade-in {
|
||||
0% {
|
||||
opacity: 0;
|
||||
transform: scale(0.8);
|
||||
}
|
||||
100% {
|
||||
opacity: 1;
|
||||
transform: scale(1);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes sonner-fade-out {
|
||||
0% {
|
||||
opacity: 1;
|
||||
transform: scale(1);
|
||||
}
|
||||
100% {
|
||||
opacity: 0;
|
||||
transform: scale(0.8);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes sonner-spin {
|
||||
0% {
|
||||
opacity: 1;
|
||||
}
|
||||
100% {
|
||||
opacity: 0.15;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-reduced-motion) {
|
||||
[data-sonner-toast],
|
||||
[data-sonner-toast] > *,
|
||||
.sonner-loading-bar {
|
||||
transition: none !important;
|
||||
animation: none !important;
|
||||
}
|
||||
}
|
||||
|
||||
.sonner-loader {
|
||||
position: absolute;
|
||||
top: 50%;
|
||||
left: 50%;
|
||||
transform: translate(-50%, -50%);
|
||||
transform-origin: center;
|
||||
transition: opacity 200ms, transform 200ms;
|
||||
}
|
||||
|
||||
.sonner-loader[data-visible='false'] {
|
||||
opacity: 0;
|
||||
transform: scale(0.8) translate(-50%, -50%);
|
||||
}
|
||||
92
@tailwind-shared/theme-variants.css
Normal file
92
@tailwind-shared/theme-variants.css
Normal file
@@ -0,0 +1,92 @@
|
||||
/**
|
||||
* Tailwind v4 Theme Variants
|
||||
* Defines theme-specific CSS variables that can be switched via classes
|
||||
* These are applied dynamically based on the theme selected in GraphQL
|
||||
*/
|
||||
|
||||
/* Default/White Theme */
|
||||
:root,
|
||||
.theme-white {
|
||||
--header-text-primary: #ffffff;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #1c1b1b;
|
||||
--header-gradient-start: rgba(28, 27, 27, 0);
|
||||
--header-gradient-end: rgba(28, 27, 27, 0.7);
|
||||
--color-border: #383735;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #1c1b1b;
|
||||
--color-gamma: #ffffff;
|
||||
--color-gamma-opaque: rgba(255, 255, 255, 0.3);
|
||||
}
|
||||
|
||||
/* Black Theme */
|
||||
.theme-black,
|
||||
.theme-black.dark {
|
||||
--header-text-primary: #1c1b1b;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #f2f2f2;
|
||||
--header-gradient-start: rgba(242, 242, 242, 0);
|
||||
--header-gradient-end: rgba(242, 242, 242, 0.7);
|
||||
--color-border: #e0e0e0;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #f2f2f2;
|
||||
--color-gamma: #1c1b1b;
|
||||
--color-gamma-opaque: rgba(28, 27, 27, 0.3);
|
||||
}
|
||||
|
||||
/* Gray Theme */
|
||||
.theme-gray {
|
||||
--header-text-primary: #ffffff;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #1c1b1b;
|
||||
--header-gradient-start: rgba(28, 27, 27, 0);
|
||||
--header-gradient-end: rgba(28, 27, 27, 0.7);
|
||||
--color-border: #383735;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #383735;
|
||||
--color-gamma: #ffffff;
|
||||
--color-gamma-opaque: rgba(255, 255, 255, 0.3);
|
||||
}
|
||||
|
||||
/* Azure Theme */
|
||||
.theme-azure {
|
||||
--header-text-primary: #1c1b1b;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #f2f2f2;
|
||||
--header-gradient-start: rgba(242, 242, 242, 0);
|
||||
--header-gradient-end: rgba(242, 242, 242, 0.7);
|
||||
--color-border: #5a8bb8;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #e7f2f8;
|
||||
--color-gamma: #336699;
|
||||
--color-gamma-opaque: rgba(51, 102, 153, 0.3);
|
||||
}
|
||||
|
||||
/* Dark Mode Overrides */
|
||||
.dark {
|
||||
--color-border: #383735;
|
||||
}
|
||||
|
||||
/*
|
||||
* Dynamic color variables for user overrides from GraphQL
|
||||
* These are set via JavaScript and override the theme defaults
|
||||
* Using :root with class for higher specificity to override theme classes
|
||||
*/
|
||||
:root.has-custom-header-text {
|
||||
--header-text-primary: var(--custom-header-text-primary);
|
||||
--color-header-text-primary: var(--custom-header-text-primary);
|
||||
}
|
||||
|
||||
:root.has-custom-header-meta {
|
||||
--header-text-secondary: var(--custom-header-text-secondary);
|
||||
--color-header-text-secondary: var(--custom-header-text-secondary);
|
||||
}
|
||||
|
||||
:root.has-custom-header-bg {
|
||||
--header-background-color: var(--custom-header-background-color);
|
||||
--color-header-background: var(--custom-header-background-color);
|
||||
--header-gradient-start: var(--custom-header-gradient-start);
|
||||
--header-gradient-end: var(--custom-header-gradient-end);
|
||||
--color-header-gradient-start: var(--custom-header-gradient-start);
|
||||
--color-header-gradient-end: var(--custom-header-gradient-end);
|
||||
}
|
||||
@@ -84,23 +84,23 @@
|
||||
--color-primary-900: #7c2d12;
|
||||
--color-primary-950: #431407;
|
||||
|
||||
/* Header colors */
|
||||
--color-header-text-primary: var(--header-text-primary);
|
||||
--color-header-text-secondary: var(--header-text-secondary);
|
||||
--color-header-background-color: var(--header-background-color);
|
||||
/* Header colors - defaults will be overridden by theme */
|
||||
--color-header-text-primary: var(--header-text-primary, #1c1c1c);
|
||||
--color-header-text-secondary: var(--header-text-secondary, #999999);
|
||||
--color-header-background: var(--header-background-color, #f2f2f2);
|
||||
|
||||
/* Legacy colors */
|
||||
--color-alpha: var(--color-alpha);
|
||||
--color-beta: var(--color-beta);
|
||||
--color-gamma: var(--color-gamma);
|
||||
--color-gamma-opaque: var(--color-gamma-opaque);
|
||||
--color-customgradient-start: var(--color-customgradient-start);
|
||||
--color-customgradient-end: var(--color-customgradient-end);
|
||||
/* Legacy colors - defaults (overridden by theme-variants.css) */
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #f2f2f2;
|
||||
--color-gamma: #999999;
|
||||
--color-gamma-opaque: rgba(153, 153, 153, 0.5);
|
||||
--color-customgradient-start: rgba(242, 242, 242, 0);
|
||||
--color-customgradient-end: rgba(242, 242, 242, 0.85);
|
||||
|
||||
/* Gradients */
|
||||
--color-header-gradient-start: var(--header-gradient-start);
|
||||
--color-header-gradient-end: var(--header-gradient-end);
|
||||
--color-banner-gradient: var(--banner-gradient);
|
||||
/* Gradients - defaults (overridden by theme-variants.css) */
|
||||
--color-header-gradient-start: rgba(242, 242, 242, 0);
|
||||
--color-header-gradient-end: rgba(242, 242, 242, 0.85);
|
||||
--color-banner-gradient: none;
|
||||
|
||||
/* Font sizes */
|
||||
--font-10px: 10px;
|
||||
@@ -167,6 +167,27 @@
|
||||
--max-width-800px: 800px;
|
||||
--max-width-1024px: 1024px;
|
||||
|
||||
/* Container sizes adjusted for 10px base font size (1.6x scale) */
|
||||
--container-xs: 32rem;
|
||||
--container-sm: 38.4rem;
|
||||
--container-md: 44.8rem;
|
||||
--container-lg: 51.2rem;
|
||||
--container-xl: 57.6rem;
|
||||
--container-2xl: 67.2rem;
|
||||
--container-3xl: 76.8rem;
|
||||
--container-4xl: 89.6rem;
|
||||
--container-5xl: 102.4rem;
|
||||
--container-6xl: 115.2rem;
|
||||
--container-7xl: 128rem;
|
||||
|
||||
/* Extended width scale for max-w-* utilities */
|
||||
--width-5xl: 102.4rem;
|
||||
--width-6xl: 115.2rem;
|
||||
--width-7xl: 128rem;
|
||||
--width-8xl: 140.8rem;
|
||||
--width-9xl: 153.6rem;
|
||||
--width-10xl: 166.4rem;
|
||||
|
||||
/* Animations */
|
||||
--animate-mark-2: mark-2 1.5s ease infinite;
|
||||
--animate-mark-3: mark-3 1.5s ease infinite;
|
||||
|
||||
11
CLAUDE.md
11
CLAUDE.md
@@ -7,7 +7,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co
|
||||
This is the Unraid API monorepo containing multiple packages that provide API functionality for Unraid servers. It uses pnpm workspaces with the following structure:
|
||||
|
||||
- `/api` - Core NestJS API server with GraphQL
|
||||
- `/web` - Nuxt.js frontend application
|
||||
- `/web` - Vue 3 frontend application
|
||||
- `/unraid-ui` - Vue 3 component library
|
||||
- `/plugin` - Unraid plugin package (.plg)
|
||||
- `/packages` - Shared packages and API plugins
|
||||
@@ -128,9 +128,6 @@ Enables GraphQL playground at `http://tower.local/graphql`
|
||||
- **Use Mocks Correctly**: Mocks should be used as nouns, not verbs.
|
||||
|
||||
#### Vue Component Testing
|
||||
|
||||
- This is a Nuxt.js app but we are testing with vitest outside of the Nuxt environment
|
||||
- Nuxt is currently set to auto import so some vue files may need compute or ref imported
|
||||
- Use pnpm when running terminal commands and stay within the web directory
|
||||
- Tests are located under `web/__test__`, run with `pnpm test`
|
||||
- Use `mount` from Vue Test Utils for component testing
|
||||
@@ -156,4 +153,8 @@ Enables GraphQL playground at `http://tower.local/graphql`
|
||||
## Development Memories
|
||||
|
||||
- We are using tailwind v4 we do not need a tailwind config anymore
|
||||
- always search the internet for tailwind v4 documentation when making tailwind related style changes
|
||||
- always search the internet for tailwind v4 documentation when making tailwind related style changes
|
||||
- never run or restart the API server or web server. I will handle the lifecycle, simply wait and ask me to do this for you
|
||||
- Never use the `any` type. Always prefer proper typing
|
||||
- Avoid using casting whenever possible, prefer proper typing from the start
|
||||
- **IMPORTANT:** cache-manager v7 expects TTL values in **milliseconds**, not seconds. Always use milliseconds when setting cache TTL (e.g., 600000 for 10 minutes, not 600)
|
||||
|
||||
@@ -18,6 +18,7 @@ PATHS_LOG_BASE=./dev/log # Where we store logs
|
||||
PATHS_LOGS_FILE=./dev/log/graphql-api.log
|
||||
PATHS_CONNECT_STATUS_FILE_PATH=./dev/connectStatus.json # Connect plugin status file
|
||||
PATHS_OIDC_JSON=./dev/configs/oidc.local.json
|
||||
PATHS_LOCAL_SESSION_FILE=./dev/local-session
|
||||
ENVIRONMENT="development"
|
||||
NODE_ENV="development"
|
||||
PORT="3001"
|
||||
@@ -30,3 +31,4 @@ BYPASS_CORS_CHECKS=true
|
||||
CHOKIDAR_USEPOLLING=true
|
||||
LOG_TRANSPORT=console
|
||||
LOG_LEVEL=trace
|
||||
ENABLE_NEXT_DOCKER_RELEASE=true
|
||||
|
||||
@@ -14,5 +14,6 @@ PATHS_CONFIG_MODULES=./dev/configs
|
||||
PATHS_ACTIVATION_BASE=./dev/activation
|
||||
PATHS_PASSWD=./dev/passwd
|
||||
PATHS_LOGS_FILE=./dev/log/graphql-api.log
|
||||
PATHS_LOCAL_SESSION_FILE=./dev/local-session
|
||||
PORT=5000
|
||||
NODE_ENV="test"
|
||||
|
||||
5
api/.gitignore
vendored
5
api/.gitignore
vendored
@@ -88,6 +88,11 @@ dev/connectStatus.json
|
||||
dev/configs/*
|
||||
# local status - doesn't need to be tracked
|
||||
dev/connectStatus.json
|
||||
# mock local session file
|
||||
dev/local-session
|
||||
|
||||
# local OIDC config for testing - contains secrets
|
||||
dev/configs/oidc.local.json
|
||||
|
||||
# local api keys
|
||||
dev/keys/*
|
||||
|
||||
198
api/CHANGELOG.md
198
api/CHANGELOG.md
@@ -1,5 +1,203 @@
|
||||
# Changelog
|
||||
|
||||
## [4.22.2](https://github.com/unraid/api/compare/v4.22.1...v4.22.2) (2025-09-15)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **deps:** pin dependency conventional-changelog-conventionalcommits to 9.1.0 ([#1697](https://github.com/unraid/api/issues/1697)) ([9a86c61](https://github.com/unraid/api/commit/9a86c615da2e975f568922fa012cc29b3f9cde0e))
|
||||
* **deps:** update dependency filenamify to v7 ([#1703](https://github.com/unraid/api/issues/1703)) ([b80988a](https://github.com/unraid/api/commit/b80988aaabebc4b8dbf2bf31f0764bf2f28e1575))
|
||||
* **deps:** update graphqlcodegenerator monorepo (major) ([#1689](https://github.com/unraid/api/issues/1689)) ([ba4a43a](https://github.com/unraid/api/commit/ba4a43aec863fc30c47dd17370d74daed7f84703))
|
||||
* false positive on verify_install script being external shell ([#1704](https://github.com/unraid/api/issues/1704)) ([31a255c](https://github.com/unraid/api/commit/31a255c9281b29df983d0f5d0475cd5a69790a48))
|
||||
* improve vue mount speed by 10x ([c855caa](https://github.com/unraid/api/commit/c855caa9b2d4d63bead1a992f5c583e00b9ba843))
|
||||
|
||||
## [4.22.1](https://github.com/unraid/api/compare/v4.22.0...v4.22.1) (2025-09-12)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* set input color in SSO field rather than inside of the main.css ([01d353f](https://github.com/unraid/api/commit/01d353fa08a3df688b37a495a204605138f7f71d))
|
||||
|
||||
## [4.22.0](https://github.com/unraid/api/compare/v4.21.0...v4.22.0) (2025-09-12)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* improved update ui ([#1691](https://github.com/unraid/api/issues/1691)) ([a59b363](https://github.com/unraid/api/commit/a59b363ebc1e660f854c55d50fc02c823c2fd0cc))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **deps:** update dependency camelcase-keys to v10 ([#1687](https://github.com/unraid/api/issues/1687)) ([95faeaa](https://github.com/unraid/api/commit/95faeaa2f39bf7bd16502698d7530aaa590b286d))
|
||||
* **deps:** update dependency p-retry to v7 ([#1608](https://github.com/unraid/api/issues/1608)) ([c782cf0](https://github.com/unraid/api/commit/c782cf0e8710c6690050376feefda3edb30dd549))
|
||||
* **deps:** update dependency uuid to v13 ([#1688](https://github.com/unraid/api/issues/1688)) ([2fef10c](https://github.com/unraid/api/commit/2fef10c94aae910e95d9f5bcacf7289e2cca6ed9))
|
||||
* **deps:** update dependency vue-sonner to v2 ([#1475](https://github.com/unraid/api/issues/1475)) ([f95ca9c](https://github.com/unraid/api/commit/f95ca9c9cb69725dcf3bb4bcbd0b558a2074e311))
|
||||
* display settings fix for languages on less than 7.2-beta.2.3 ([#1696](https://github.com/unraid/api/issues/1696)) ([03dae7c](https://github.com/unraid/api/commit/03dae7ce66b3409593eeee90cd5b56e2a920ca44))
|
||||
* hide reset help option when sso is being checked ([#1695](https://github.com/unraid/api/issues/1695)) ([222ced7](https://github.com/unraid/api/commit/222ced7518d40c207198a3b8548f0e024bc865b0))
|
||||
* progressFrame white on black ([0990b89](https://github.com/unraid/api/commit/0990b898bd02c231153157c20d5142e5fd4513cd))
|
||||
|
||||
## [4.21.0](https://github.com/unraid/api/compare/v4.20.4...v4.21.0) (2025-09-10)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add zsh shell detection to install script ([#1539](https://github.com/unraid/api/issues/1539)) ([50ea2a3](https://github.com/unraid/api/commit/50ea2a3ffb82b30152fb85e0fb9b0d178d596efe))
|
||||
* **api:** determine if docker container has update ([#1582](https://github.com/unraid/api/issues/1582)) ([e57d81e](https://github.com/unraid/api/commit/e57d81e0735772758bb85e0b3c89dce15c56635e))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* white on white login text ([ae4d3ec](https://github.com/unraid/api/commit/ae4d3ecbc417454ae3c6e02018f8e4c49bbfc902))
|
||||
|
||||
## [4.20.4](https://github.com/unraid/api/compare/v4.20.3...v4.20.4) (2025-09-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* staging PR plugin fixes + UI issues on 7.2 beta ([b79b44e](https://github.com/unraid/api/commit/b79b44e95c65a124313814ab55b0d0a745a799c7))
|
||||
|
||||
## [4.20.3](https://github.com/unraid/api/compare/v4.20.2...v4.20.3) (2025-09-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* header background color issues fixed on 7.2 - thanks Nick! ([73c1100](https://github.com/unraid/api/commit/73c1100d0ba396fe4342f8ce7561017ab821e68b))
|
||||
|
||||
## [4.20.2](https://github.com/unraid/api/compare/v4.20.1...v4.20.2) (2025-09-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* trigger deployment ([a27453f](https://github.com/unraid/api/commit/a27453fda81e4eeb07f257e60516bebbbc27cf7a))
|
||||
|
||||
## [4.20.1](https://github.com/unraid/api/compare/v4.20.0...v4.20.1) (2025-09-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* adjust header styles to fix flashing and width issues - thanks ZarZ ([4759b3d](https://github.com/unraid/api/commit/4759b3d0b3fb6bc71636f75f807cd6f4f62305d1))
|
||||
|
||||
## [4.20.0](https://github.com/unraid/api/compare/v4.19.1...v4.20.0) (2025-09-08)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **disks:** add isSpinning field to Disk type ([#1527](https://github.com/unraid/api/issues/1527)) ([193be3d](https://github.com/unraid/api/commit/193be3df3672514be9904e3d4fbdff776470afc0))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* better component loading to prevent per-page strange behavior ([095c222](https://github.com/unraid/api/commit/095c2221c94f144f8ad410a69362b15803765531))
|
||||
* **deps:** pin dependencies ([#1669](https://github.com/unraid/api/issues/1669)) ([413db4b](https://github.com/unraid/api/commit/413db4bd30a06aa69d3ca86e793782854f822589))
|
||||
* **plugin:** add fallback for unraid-api stop in deprecation cleanup ([#1668](https://github.com/unraid/api/issues/1668)) ([797bf50](https://github.com/unraid/api/commit/797bf50ec702ebc8244ff71a8ef1a80ea5cd2169))
|
||||
* prepend 'v' to API version in workflow dispatch inputs ([f0cffbd](https://github.com/unraid/api/commit/f0cffbdc7ac36e7037ab60fe9dddbb2cab4a5e10))
|
||||
* progress frame background color fix ([#1672](https://github.com/unraid/api/issues/1672)) ([785f1f5](https://github.com/unraid/api/commit/785f1f5eb1a1cc8b41f6eb502e4092d149cfbd80))
|
||||
* properly override header values ([#1673](https://github.com/unraid/api/issues/1673)) ([aecf70f](https://github.com/unraid/api/commit/aecf70ffad60c83074347d3d6ec23f73acbd1aee))
|
||||
|
||||
## [4.19.1](https://github.com/unraid/api/compare/v4.19.0...v4.19.1) (2025-09-05)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* custom path detection to fix setup issues ([#1664](https://github.com/unraid/api/issues/1664)) ([2ecdb99](https://github.com/unraid/api/commit/2ecdb99052f39d89af21bbe7ad3f80b83bb1eaa9))
|
||||
|
||||
## [4.19.0](https://github.com/unraid/api/compare/v4.18.2...v4.19.0) (2025-09-04)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* mount vue apps, not web components ([#1639](https://github.com/unraid/api/issues/1639)) ([88087d5](https://github.com/unraid/api/commit/88087d5201992298cdafa791d5d1b5bb23dcd72b))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* api version json response ([#1653](https://github.com/unraid/api/issues/1653)) ([292bc0f](https://github.com/unraid/api/commit/292bc0fc810a0d0f0cce6813b0631ff25099cc05))
|
||||
* enhance DOM validation and cleanup in vue-mount-app ([6cf7c88](https://github.com/unraid/api/commit/6cf7c88242f2f4fe9f83871560039767b5b90273))
|
||||
* enhance getKeyFile function to handle missing key file gracefully ([#1659](https://github.com/unraid/api/issues/1659)) ([728b38a](https://github.com/unraid/api/commit/728b38ac11faeacd39ce9d0157024ad140e29b36))
|
||||
* info alert docker icon ([#1661](https://github.com/unraid/api/issues/1661)) ([239cdd6](https://github.com/unraid/api/commit/239cdd6133690699348e61f68e485d2b54fdcbdb))
|
||||
* oidc cache busting issues fixed ([#1656](https://github.com/unraid/api/issues/1656)) ([e204eb8](https://github.com/unraid/api/commit/e204eb80a00ab9242e3dca4ccfc3e1b55a7694b7))
|
||||
* **plugin:** restore cleanup behavior for unsupported unraid versions ([#1658](https://github.com/unraid/api/issues/1658)) ([534a077](https://github.com/unraid/api/commit/534a07788b76de49e9ba14059a9aed0bf16e02ca))
|
||||
* UnraidToaster component and update dialog close button ([#1657](https://github.com/unraid/api/issues/1657)) ([44774d0](https://github.com/unraid/api/commit/44774d0acdd25aa33cb60a5d0b4f80777f4068e5))
|
||||
* vue mounting logic with tests ([#1651](https://github.com/unraid/api/issues/1651)) ([33774aa](https://github.com/unraid/api/commit/33774aa596124a031a7452b62ca4c43743a09951))
|
||||
|
||||
## [4.18.2](https://github.com/unraid/api/compare/v4.18.1...v4.18.2) (2025-09-03)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add missing CPU guest metrics to CPU responses ([#1644](https://github.com/unraid/api/issues/1644)) ([99dbad5](https://github.com/unraid/api/commit/99dbad57d55a256f5f3f850f9a47a6eaa6348065))
|
||||
* **plugin:** raise minimum unraid os version to 6.12.15 ([#1649](https://github.com/unraid/api/issues/1649)) ([bc15bd3](https://github.com/unraid/api/commit/bc15bd3d7008acb416ac3c6fb1f4724c685ec7e7))
|
||||
* update GitHub Actions token for workflow trigger ([4d8588b](https://github.com/unraid/api/commit/4d8588b17331afa45ba8caf84fcec8c0ea03591f))
|
||||
* update OIDC URL validation and add tests ([#1646](https://github.com/unraid/api/issues/1646)) ([c7c3bb5](https://github.com/unraid/api/commit/c7c3bb57ea482633a7acff064b39fbc8d4e07213))
|
||||
* use shared bg & border color for styled toasts ([#1647](https://github.com/unraid/api/issues/1647)) ([7c3aee8](https://github.com/unraid/api/commit/7c3aee8f3f9ba82ae8c8ed3840c20ab47f3cb00f))
|
||||
|
||||
## [4.18.1](https://github.com/unraid/api/compare/v4.18.0...v4.18.1) (2025-09-03)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* OIDC and API Key management issues ([#1642](https://github.com/unraid/api/issues/1642)) ([0fe2c2c](https://github.com/unraid/api/commit/0fe2c2c1c85dcc547e4b1217a3b5636d7dd6d4b4))
|
||||
* rm redundant emission to `$HOME/.pm2/logs` ([#1640](https://github.com/unraid/api/issues/1640)) ([a8e4119](https://github.com/unraid/api/commit/a8e4119270868a1dabccd405853a7340f8dcd8a5))
|
||||
|
||||
## [4.18.0](https://github.com/unraid/api/compare/v4.17.0...v4.18.0) (2025-09-02)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **api:** enhance OIDC redirect URI handling in service and tests ([#1618](https://github.com/unraid/api/issues/1618)) ([4e945f5](https://github.com/unraid/api/commit/4e945f5f56ce059eb275a9576caf3194a5df8a90))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* api key creation cli ([#1637](https://github.com/unraid/api/issues/1637)) ([c147a6b](https://github.com/unraid/api/commit/c147a6b5075969e77798210c4a5cfd1fa5b96ae3))
|
||||
* **cli:** support `--log-level` for `start` and `restart` cmds ([#1623](https://github.com/unraid/api/issues/1623)) ([a1ee915](https://github.com/unraid/api/commit/a1ee915ca52e5a063eccf8facbada911a63f37f6))
|
||||
* confusing server -> status query ([#1635](https://github.com/unraid/api/issues/1635)) ([9d42b36](https://github.com/unraid/api/commit/9d42b36f74274cad72490da5152fdb98fdc5b89b))
|
||||
* use unraid css variables in sonner ([#1634](https://github.com/unraid/api/issues/1634)) ([26a95af](https://github.com/unraid/api/commit/26a95af9539d05a837112d62dc6b7dd46761c83f))
|
||||
|
||||
## [4.17.0](https://github.com/unraid/api/compare/v4.16.0...v4.17.0) (2025-08-27)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add tailwind class sort plugin ([#1562](https://github.com/unraid/api/issues/1562)) ([ab11e7f](https://github.com/unraid/api/commit/ab11e7ff7ff74da1f1cd5e49938459d00bfc846b))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* cleanup obsoleted legacy api keys on api startup (cli / connect) ([#1630](https://github.com/unraid/api/issues/1630)) ([6469d00](https://github.com/unraid/api/commit/6469d002b7b18e49c77ee650a4255974ab43e790))
|
||||
|
||||
## [4.16.0](https://github.com/unraid/api/compare/v4.15.1...v4.16.0) (2025-08-27)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add `parityCheckStatus` field to `array` query ([#1611](https://github.com/unraid/api/issues/1611)) ([c508366](https://github.com/unraid/api/commit/c508366702b9fa20d9ed05559fe73da282116aa6))
|
||||
* generated UI API key management + OAuth-like API Key Flows ([#1609](https://github.com/unraid/api/issues/1609)) ([674323f](https://github.com/unraid/api/commit/674323fd87bbcc55932e6b28f6433a2de79b7ab0))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **connect:** clear `wanport` upon disabling remote access ([#1624](https://github.com/unraid/api/issues/1624)) ([9df6a3f](https://github.com/unraid/api/commit/9df6a3f5ebb0319aa7e3fe3be6159d39ec6f587f))
|
||||
* **connect:** valid LAN FQDN while remote access is enabled ([#1625](https://github.com/unraid/api/issues/1625)) ([aa58888](https://github.com/unraid/api/commit/aa588883cc2e2fe4aa4aea1d035236c888638f5b))
|
||||
* correctly parse periods in share names from ini file ([#1629](https://github.com/unraid/api/issues/1629)) ([7d67a40](https://github.com/unraid/api/commit/7d67a404333a38d6e1ba5c3febf02be8b1b71901))
|
||||
* **rc.unraid-api:** remove profile sourcing ([#1622](https://github.com/unraid/api/issues/1622)) ([6947b5d](https://github.com/unraid/api/commit/6947b5d4aff70319116eb65cf4c639444f3749e9))
|
||||
* remove unused api key calls ([#1628](https://github.com/unraid/api/issues/1628)) ([9cd0d6a](https://github.com/unraid/api/commit/9cd0d6ac658475efa25683ef6e3f2e1d68f7e903))
|
||||
* retry VMs init for up to 2 min ([#1612](https://github.com/unraid/api/issues/1612)) ([b2e7801](https://github.com/unraid/api/commit/b2e78012384e6b3f2630341281fc811026be23b9))
|
||||
|
||||
## [4.15.1](https://github.com/unraid/api/compare/v4.15.0...v4.15.1) (2025-08-20)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* minor duplicate click handler and version resolver nullability issue ([ac198d5](https://github.com/unraid/api/commit/ac198d5d1a3073fdeb053c2ff8f704b0dba0d047))
|
||||
|
||||
## [4.15.0](https://github.com/unraid/api/compare/v4.14.0...v4.15.0) (2025-08-20)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **api:** restructure versioning information in GraphQL schema ([#1600](https://github.com/unraid/api/issues/1600)) ([d0c6602](https://github.com/unraid/api/commit/d0c66020e1d1d5b6fcbc4ee8979bba4b3d34c7ad))
|
||||
|
||||
## [4.14.0](https://github.com/unraid/api/compare/v4.13.1...v4.14.0) (2025-08-19)
|
||||
|
||||
|
||||
|
||||
@@ -17,6 +17,7 @@ const config: CodegenConfig = {
|
||||
URL: 'URL',
|
||||
Port: 'number',
|
||||
UUID: 'string',
|
||||
BigInt: 'number',
|
||||
},
|
||||
scalarSchemas: {
|
||||
URL: 'z.instanceof(URL)',
|
||||
@@ -24,6 +25,7 @@ const config: CodegenConfig = {
|
||||
JSON: 'z.record(z.string(), z.any())',
|
||||
Port: 'z.number()',
|
||||
UUID: 'z.string()',
|
||||
BigInt: 'z.number()',
|
||||
},
|
||||
},
|
||||
generates: {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"version": "4.13.1",
|
||||
"version": "4.22.2",
|
||||
"extraOrigins": [],
|
||||
"sandbox": true,
|
||||
"ssoSubIds": [],
|
||||
|
||||
@@ -17,5 +17,6 @@
|
||||
],
|
||||
"buttonText": "Login With Unraid.net"
|
||||
}
|
||||
]
|
||||
],
|
||||
"defaultAllowedOrigins": []
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"createdAt": "2025-01-27T16:22:56.501Z",
|
||||
"description": "API key for Connect user",
|
||||
"id": "b5b4aa3d-8e40-4c92-bc40-d50182071886",
|
||||
"key": "_______________________LOCAL_API_KEY_HERE_________________________",
|
||||
"name": "Connect",
|
||||
"permissions": [],
|
||||
"roles": [
|
||||
"CONNECT"
|
||||
]
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"createdAt": "2025-07-23T17:34:06.301Z",
|
||||
"description": "Internal admin API key used by CLI commands for system operations",
|
||||
"id": "fc91da7b-0284-46f4-9018-55aa9759fba9",
|
||||
"key": "_______SUPER_SECRET_KEY_______",
|
||||
"name": "CliInternal",
|
||||
"permissions": [],
|
||||
"roles": [
|
||||
"ADMIN"
|
||||
]
|
||||
}
|
||||
@@ -65,4 +65,38 @@ color="yellow-on"
|
||||
size="0"
|
||||
free="9091184"
|
||||
used="32831348"
|
||||
luksStatus="0"
|
||||
["system.with.periods"]
|
||||
name="system.with.periods"
|
||||
nameOrig="system.with.periods"
|
||||
comment="system data with periods"
|
||||
allocator="highwater"
|
||||
splitLevel="1"
|
||||
floor="0"
|
||||
include=""
|
||||
exclude=""
|
||||
useCache="prefer"
|
||||
cachePool="cache"
|
||||
cow="auto"
|
||||
color="yellow-on"
|
||||
size="0"
|
||||
free="9091184"
|
||||
used="32831348"
|
||||
luksStatus="0"
|
||||
["system.with.🚀"]
|
||||
name="system.with.🚀"
|
||||
nameOrig="system.with.🚀"
|
||||
comment="system data with 🚀"
|
||||
allocator="highwater"
|
||||
splitLevel="1"
|
||||
floor="0"
|
||||
include=""
|
||||
exclude=""
|
||||
useCache="prefer"
|
||||
cachePool="cache"
|
||||
cow="auto"
|
||||
color="yellow-on"
|
||||
size="0"
|
||||
free="9091184"
|
||||
used="32831348"
|
||||
luksStatus="0"
|
||||
247
api/docs/developer/feature-flags.md
Normal file
247
api/docs/developer/feature-flags.md
Normal file
@@ -0,0 +1,247 @@
|
||||
# Feature Flags
|
||||
|
||||
Feature flags allow you to conditionally enable or disable functionality in the Unraid API. This is useful for gradually rolling out new features, A/B testing, or keeping experimental code behind flags during development.
|
||||
|
||||
## Setting Up Feature Flags
|
||||
|
||||
### 1. Define the Feature Flag
|
||||
|
||||
Feature flags are defined as environment variables and collected in `src/consts.ts`:
|
||||
|
||||
```typescript
|
||||
// src/environment.ts
|
||||
export const ENABLE_MY_NEW_FEATURE = process.env.ENABLE_MY_NEW_FEATURE === 'true';
|
||||
|
||||
// src/consts.ts
|
||||
export const FeatureFlags = Object.freeze({
|
||||
ENABLE_NEXT_DOCKER_RELEASE,
|
||||
ENABLE_MY_NEW_FEATURE, // Add your new flag here
|
||||
});
|
||||
```
|
||||
|
||||
### 2. Set the Environment Variable
|
||||
|
||||
Set the environment variable when running the API:
|
||||
|
||||
```bash
|
||||
ENABLE_MY_NEW_FEATURE=true unraid-api start
|
||||
```
|
||||
|
||||
Or add it to your `.env` file:
|
||||
|
||||
```env
|
||||
ENABLE_MY_NEW_FEATURE=true
|
||||
```
|
||||
|
||||
## Using Feature Flags in GraphQL
|
||||
|
||||
### Method 1: @UseFeatureFlag Decorator (Schema-Level)
|
||||
|
||||
The `@UseFeatureFlag` decorator conditionally includes or excludes GraphQL fields, queries, and mutations from the schema based on feature flags. When a feature flag is disabled, the field won't appear in the GraphQL schema at all.
|
||||
|
||||
```typescript
|
||||
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
|
||||
import { Query, Mutation, ResolveField } from '@nestjs/graphql';
|
||||
|
||||
@Resolver()
|
||||
export class MyResolver {
|
||||
|
||||
// Conditionally include a query
|
||||
@UseFeatureFlag('ENABLE_MY_NEW_FEATURE')
|
||||
@Query(() => String)
|
||||
async experimentalQuery() {
|
||||
return 'This query only exists when ENABLE_MY_NEW_FEATURE is true';
|
||||
}
|
||||
|
||||
// Conditionally include a mutation
|
||||
@UseFeatureFlag('ENABLE_MY_NEW_FEATURE')
|
||||
@Mutation(() => Boolean)
|
||||
async experimentalMutation() {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Conditionally include a field resolver
|
||||
@UseFeatureFlag('ENABLE_MY_NEW_FEATURE')
|
||||
@ResolveField(() => String)
|
||||
async experimentalField() {
|
||||
return 'This field only exists when the flag is enabled';
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- Clean schema - disabled features don't appear in GraphQL introspection
|
||||
- No runtime overhead for disabled features
|
||||
- Clear feature boundaries
|
||||
|
||||
**Use when:**
|
||||
- You want to completely hide features from the GraphQL schema
|
||||
- The feature is experimental or in beta
|
||||
- You're doing a gradual rollout
|
||||
|
||||
### Method 2: checkFeatureFlag Function (Runtime)
|
||||
|
||||
The `checkFeatureFlag` function provides runtime feature flag checking within resolver methods. It throws a `ForbiddenException` if the feature is disabled.
|
||||
|
||||
```typescript
|
||||
import { checkFeatureFlag } from '@app/unraid-api/utils/feature-flag.helper.js';
|
||||
import { FeatureFlags } from '@app/consts.js';
|
||||
import { Query, ResolveField } from '@nestjs/graphql';
|
||||
|
||||
@Resolver()
|
||||
export class MyResolver {
|
||||
|
||||
@Query(() => String)
|
||||
async myQuery(
|
||||
@Args('useNewAlgorithm', { nullable: true }) useNewAlgorithm?: boolean
|
||||
) {
|
||||
// Conditionally use new logic based on feature flag
|
||||
if (useNewAlgorithm) {
|
||||
checkFeatureFlag(FeatureFlags, 'ENABLE_MY_NEW_FEATURE');
|
||||
return this.newAlgorithm();
|
||||
}
|
||||
|
||||
return this.oldAlgorithm();
|
||||
}
|
||||
|
||||
@ResolveField(() => String)
|
||||
async dataField() {
|
||||
// Check flag at the start of the method
|
||||
checkFeatureFlag(FeatureFlags, 'ENABLE_MY_NEW_FEATURE');
|
||||
|
||||
// Feature-specific logic here
|
||||
return this.computeExperimentalData();
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- More granular control within methods
|
||||
- Can conditionally execute parts of a method
|
||||
- Useful for A/B testing scenarios
|
||||
- Good for gradual migration strategies
|
||||
|
||||
**Use when:**
|
||||
- You need conditional logic within a method
|
||||
- The field should exist but behavior changes based on the flag
|
||||
- You're migrating from old to new implementation gradually
|
||||
|
||||
## Feature Flag Patterns
|
||||
|
||||
### Pattern 1: Complete Feature Toggle
|
||||
|
||||
Hide an entire feature behind a flag:
|
||||
|
||||
```typescript
|
||||
@UseFeatureFlag('ENABLE_DOCKER_TEMPLATES')
|
||||
@Resolver(() => DockerTemplate)
|
||||
export class DockerTemplateResolver {
|
||||
// All resolvers in this class are toggled by the flag
|
||||
}
|
||||
```
|
||||
|
||||
### Pattern 2: Gradual Migration
|
||||
|
||||
Migrate from old to new implementation:
|
||||
|
||||
```typescript
|
||||
@Query(() => [Container])
|
||||
async getContainers(@Args('version') version?: string) {
|
||||
if (version === 'v2') {
|
||||
checkFeatureFlag(FeatureFlags, 'ENABLE_CONTAINERS_V2');
|
||||
return this.getContainersV2();
|
||||
}
|
||||
|
||||
return this.getContainersV1();
|
||||
}
|
||||
```
|
||||
|
||||
### Pattern 3: Beta Features
|
||||
|
||||
Mark features as beta:
|
||||
|
||||
```typescript
|
||||
@UseFeatureFlag('ENABLE_BETA_FEATURES')
|
||||
@ResolveField(() => BetaMetrics, {
|
||||
description: 'BETA: Advanced metrics (requires ENABLE_BETA_FEATURES flag)'
|
||||
})
|
||||
async betaMetrics() {
|
||||
return this.computeBetaMetrics();
|
||||
}
|
||||
```
|
||||
|
||||
### Pattern 4: Performance Optimizations
|
||||
|
||||
Toggle expensive operations:
|
||||
|
||||
```typescript
|
||||
@ResolveField(() => Statistics)
|
||||
async statistics() {
|
||||
const basicStats = await this.getBasicStats();
|
||||
|
||||
try {
|
||||
checkFeatureFlag(FeatureFlags, 'ENABLE_ADVANCED_ANALYTICS');
|
||||
const advancedStats = await this.getAdvancedStats();
|
||||
return { ...basicStats, ...advancedStats };
|
||||
} catch {
|
||||
// Feature disabled, return only basic stats
|
||||
return basicStats;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Testing with Feature Flags
|
||||
|
||||
When writing tests for feature-flagged code, create a mock to control feature flag values:
|
||||
|
||||
```typescript
|
||||
import { vi } from 'vitest';
|
||||
|
||||
// Mock the entire consts module
|
||||
vi.mock('@app/consts.js', async () => {
|
||||
const actual = await vi.importActual('@app/consts.js');
|
||||
return {
|
||||
...actual,
|
||||
FeatureFlags: {
|
||||
ENABLE_MY_NEW_FEATURE: true, // Set your test value
|
||||
ENABLE_NEXT_DOCKER_RELEASE: false,
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
describe('MyResolver', () => {
|
||||
it('should execute new logic when feature is enabled', async () => {
|
||||
// Test new behavior with mocked flag
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Naming Convention**: Use `ENABLE_` prefix for boolean feature flags
|
||||
2. **Environment Variables**: Always use uppercase with underscores
|
||||
3. **Documentation**: Document what each feature flag controls
|
||||
4. **Cleanup**: Remove feature flags once features are stable and fully rolled out
|
||||
5. **Default State**: New features should default to `false` (disabled)
|
||||
6. **Granularity**: Keep feature flags focused on a single feature or capability
|
||||
7. **Testing**: Always test both enabled and disabled states
|
||||
|
||||
## Common Use Cases
|
||||
|
||||
- **Experimental Features**: Hide unstable features in production
|
||||
- **Gradual Rollouts**: Enable features for specific environments first
|
||||
- **A/B Testing**: Toggle between different implementations
|
||||
- **Performance**: Disable expensive operations when not needed
|
||||
- **Breaking Changes**: Provide migration path with both old and new behavior
|
||||
- **Debug Features**: Enable additional logging or debugging tools
|
||||
|
||||
## Checking Active Feature Flags
|
||||
|
||||
To see which feature flags are currently active:
|
||||
|
||||
```typescript
|
||||
// Log all feature flags on startup
|
||||
console.log('Active Feature Flags:', FeatureFlags);
|
||||
```
|
||||
|
||||
Or check via GraphQL introspection to see which fields are available based on current flags.
|
||||
100
api/docs/public/api-key-app-developer-authorization-flow.md
Normal file
100
api/docs/public/api-key-app-developer-authorization-flow.md
Normal file
@@ -0,0 +1,100 @@
|
||||
# API Key Authorization Flow
|
||||
|
||||
This document describes the self-service API key creation flow for third-party applications.
|
||||
|
||||
## Overview
|
||||
|
||||
Applications can request API access to an Unraid server by redirecting users to a special authorization page where users can review requested permissions and create an API key with one click.
|
||||
|
||||
## Flow
|
||||
|
||||
1. **Application initiates request**: The app redirects the user to:
|
||||
|
||||
```
|
||||
https://[unraid-server]/ApiKeyAuthorize?name=MyApp&scopes=docker:read,vm:*&redirect_uri=https://myapp.com/callback&state=abc123
|
||||
```
|
||||
|
||||
2. **User authentication**: If not already logged in, the user is redirected to login first (standard Unraid auth)
|
||||
|
||||
3. **Consent screen**: User sees:
|
||||
- Application name and description
|
||||
- Requested permissions (with checkboxes to approve/deny specific scopes)
|
||||
- API key name field (pre-filled)
|
||||
- Authorize & Cancel buttons
|
||||
|
||||
4. **API key creation**: Upon authorization:
|
||||
- API key is created with approved scopes
|
||||
- Key is displayed to the user
|
||||
- If `redirect_uri` is provided, user is redirected back with the key
|
||||
|
||||
5. **Callback**: App receives the API key:
|
||||
```
|
||||
https://myapp.com/callback?api_key=xxx&state=abc123
|
||||
```
|
||||
|
||||
## Query Parameters
|
||||
|
||||
- `name` (required): Name of the requesting application
|
||||
- `description` (optional): Description of the application
|
||||
- `scopes` (required): Comma-separated list of requested scopes
|
||||
- `redirect_uri` (optional): URL to redirect after authorization
|
||||
- `state` (optional): Opaque value for maintaining state
|
||||
|
||||
## Scope Format
|
||||
|
||||
Scopes follow the pattern: `resource:action`
|
||||
|
||||
### Examples:
|
||||
|
||||
- `docker:read` - Read access to Docker
|
||||
- `vm:*` - Full access to VMs
|
||||
- `system:update` - Update access to system
|
||||
- `role:viewer` - Viewer role access
|
||||
- `role:admin` - Admin role access
|
||||
|
||||
### Available Resources:
|
||||
|
||||
- `docker`, `vm`, `system`, `share`, `user`, `network`, `disk`, etc.
|
||||
|
||||
### Available Actions:
|
||||
|
||||
- `create`, `read`, `update`, `delete`, or `*` for all actions
|
||||
|
||||
## Security Considerations
|
||||
|
||||
1. **HTTPS required**: Redirect URIs must use HTTPS (except localhost for development)
|
||||
2. **User consent**: Users explicitly approve each permission
|
||||
3. **Session-based**: Uses existing Unraid authentication session
|
||||
4. **One-time display**: API keys are shown once and must be saved securely
|
||||
|
||||
## Example Integration
|
||||
|
||||
```javascript
|
||||
// JavaScript example
|
||||
const unraidServer = 'tower.local';
|
||||
const appName = 'My Docker Manager';
|
||||
const scopes = 'docker:*,system:read';
|
||||
const redirectUri = 'https://myapp.com/unraid/callback';
|
||||
const state = generateRandomState();
|
||||
|
||||
// Store state for verification
|
||||
sessionStorage.setItem('oauth_state', state);
|
||||
|
||||
// Redirect user to authorization page
|
||||
window.location.href =
|
||||
`https://${unraidServer}/ApiKeyAuthorize?` +
|
||||
`name=${encodeURIComponent(appName)}&` +
|
||||
`scopes=${encodeURIComponent(scopes)}&` +
|
||||
`redirect_uri=${encodeURIComponent(redirectUri)}&` +
|
||||
`state=${encodeURIComponent(state)}`;
|
||||
|
||||
// Handle callback
|
||||
const urlParams = new URLSearchParams(window.location.search);
|
||||
const apiKey = urlParams.get('api_key');
|
||||
const returnedState = urlParams.get('state');
|
||||
|
||||
if (returnedState === sessionStorage.getItem('oauth_state')) {
|
||||
// Save API key securely
|
||||
saveApiKey(apiKey);
|
||||
}
|
||||
```
|
||||
@@ -21,7 +21,14 @@ unraid-api start [--log-level <level>]
|
||||
Starts the Unraid API service.
|
||||
|
||||
Options:
|
||||
- `--log-level`: Set logging level (trace|debug|info|warn|error)
|
||||
|
||||
- `--log-level`: Set logging level (trace|debug|info|warn|error|fatal)
|
||||
|
||||
Alternative: You can also set the log level using the `LOG_LEVEL` environment variable:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=trace unraid-api start
|
||||
```
|
||||
|
||||
### Stop
|
||||
|
||||
@@ -36,11 +43,21 @@ Stops the Unraid API service.
|
||||
### Restart
|
||||
|
||||
```bash
|
||||
unraid-api restart
|
||||
unraid-api restart [--log-level <level>]
|
||||
```
|
||||
|
||||
Restarts the Unraid API service.
|
||||
|
||||
Options:
|
||||
|
||||
- `--log-level`: Set logging level (trace|debug|info|warn|error|fatal)
|
||||
|
||||
Alternative: You can also set the log level using the `LOG_LEVEL` environment variable:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=trace unraid-api restart
|
||||
```
|
||||
|
||||
### Logs
|
||||
|
||||
```bash
|
||||
|
||||
@@ -7,32 +7,34 @@ sidebar_position: 1
|
||||
# Welcome to Unraid API
|
||||
|
||||
:::tip[What's New]
|
||||
Native integration in Unraid v7.2+ brings the API directly into the OS - no plugin needed!
|
||||
Starting with Unraid OS v7.2, the API comes built into the operating system - no plugin installation required!
|
||||
:::
|
||||
|
||||
The Unraid API provides a GraphQL interface for programmatic interaction with your Unraid server. It enables automation, monitoring, and integration capabilities.
|
||||
|
||||
## 📦 Availability
|
||||
|
||||
### ✨ Native Integration (Unraid v7.2-beta.1+)
|
||||
### ✨ Native Integration (Unraid OS v7.2+)
|
||||
|
||||
Starting with Unraid v7.2-beta.1, the API is integrated directly into the Unraid operating system:
|
||||
Starting with Unraid OS v7.2, the API is integrated directly into the operating system:
|
||||
|
||||
- No plugin installation required
|
||||
- Automatically available on system startup
|
||||
- Deep system integration
|
||||
- Access through **Settings** → **Management Access** → **API**
|
||||
|
||||
### 🔌 Plugin Installation (Earlier Versions)
|
||||
### 🔌 Plugin Installation (Pre-7.2 and Advanced Users)
|
||||
|
||||
For Unraid versions prior to v7.2:
|
||||
For Unraid versions prior to v7.2 or to access newer API features:
|
||||
|
||||
1. Install Unraid Connect Plugin from Apps
|
||||
1. Install the Unraid Connect Plugin from Community Apps
|
||||
2. [Configure the plugin](./how-to-use-the-api.md#enabling-the-graphql-sandbox)
|
||||
3. Access API functionality through the [GraphQL Sandbox](./how-to-use-the-api.md)
|
||||
|
||||
:::tip Pre-release Versions
|
||||
You can install the Unraid Connect plugin on any version to access pre-release versions of the API and get early access to new features before they're included in Unraid OS releases.
|
||||
:::info Important Notes
|
||||
- The Unraid Connect plugin provides the API for pre-7.2 versions
|
||||
- You do NOT need to sign in to Unraid Connect to use the API locally
|
||||
- Installing the plugin on 7.2+ gives you access to newer API features before they're included in OS releases
|
||||
:::
|
||||
|
||||
## 📚 Documentation Sections
|
||||
@@ -69,20 +71,22 @@ The API provides:
|
||||
## 🚀 Get Started
|
||||
|
||||
<tabs>
|
||||
<tabItem value="v72" label="Unraid v7.2+" default>
|
||||
<tabItem value="v72" label="Unraid OS v7.2+" default>
|
||||
|
||||
1. Access the API settings at **Settings** → **Management Access** → **API**
|
||||
2. Enable the GraphQL Sandbox for development
|
||||
3. Create your first API key
|
||||
4. Start making GraphQL queries!
|
||||
1. The API is already installed and running
|
||||
2. Access settings at **Settings** → **Management Access** → **API**
|
||||
3. Enable the GraphQL Sandbox for development
|
||||
4. Create your first API key
|
||||
5. Start making GraphQL queries!
|
||||
|
||||
</tabItem>
|
||||
<tabItem value="older" label="Earlier Versions">
|
||||
<tabItem value="older" label="Pre-7.2 Versions">
|
||||
|
||||
1. Install the Unraid Connect plugin from Apps
|
||||
2. Configure the plugin settings
|
||||
3. Enable the GraphQL Sandbox
|
||||
4. Start exploring the API!
|
||||
1. Install the Unraid Connect plugin from Community Apps
|
||||
2. No Unraid Connect login required for local API access
|
||||
3. Configure the plugin settings
|
||||
4. Enable the GraphQL Sandbox
|
||||
5. Start exploring the API!
|
||||
|
||||
</tabItem>
|
||||
</tabs>
|
||||
|
||||
252
api/docs/public/programmatic-api-key-management.md
Normal file
252
api/docs/public/programmatic-api-key-management.md
Normal file
@@ -0,0 +1,252 @@
|
||||
---
|
||||
title: Programmatic API Key Management
|
||||
description: Create, use, and delete API keys programmatically for automated workflows
|
||||
sidebar_position: 4
|
||||
---
|
||||
|
||||
# Programmatic API Key Management
|
||||
|
||||
This guide explains how to create, use, and delete API keys programmatically using the Unraid API CLI, enabling automated workflows and scripts.
|
||||
|
||||
## Overview
|
||||
|
||||
The `unraid-api apikey` command supports both interactive and non-interactive modes, making it suitable for:
|
||||
|
||||
- Automated deployment scripts
|
||||
- CI/CD pipelines
|
||||
- Temporary access provisioning
|
||||
- Infrastructure as code workflows
|
||||
|
||||
:::tip[Quick Start]
|
||||
Jump to the [Complete Workflow Example](#complete-workflow-example) to see everything in action.
|
||||
:::
|
||||
|
||||
## Creating API Keys Programmatically
|
||||
|
||||
### Basic Creation with JSON Output
|
||||
|
||||
Use the `--json` flag to get machine-readable output:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create --name "workflow key" --roles ADMIN --json
|
||||
```
|
||||
|
||||
**Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"key": "your-generated-api-key-here",
|
||||
"name": "workflow key",
|
||||
"id": "generated-uuid"
|
||||
}
|
||||
```
|
||||
|
||||
### Advanced Creation with Permissions
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create \
|
||||
--name "limited access key" \
|
||||
--permissions "DOCKER:READ_ANY,ARRAY:READ_ANY" \
|
||||
--description "Read-only access for monitoring" \
|
||||
--json
|
||||
```
|
||||
|
||||
### Handling Existing Keys
|
||||
|
||||
If a key with the same name exists, use `--overwrite`:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create --name "existing key" --roles ADMIN --overwrite --json
|
||||
```
|
||||
|
||||
:::warning[Key Replacement]
|
||||
The `--overwrite` flag will permanently replace the existing key. The old key will be immediately invalidated.
|
||||
:::
|
||||
|
||||
## Deleting API Keys Programmatically
|
||||
|
||||
### Non-Interactive Deletion
|
||||
|
||||
Delete a key by name without prompts:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --delete --name "workflow key"
|
||||
```
|
||||
|
||||
**Output:**
|
||||
|
||||
```
|
||||
Successfully deleted 1 API key
|
||||
```
|
||||
|
||||
### JSON Output for Deletion
|
||||
|
||||
Use `--json` flag for machine-readable delete confirmation:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --delete --name "workflow key" --json
|
||||
```
|
||||
|
||||
**Success Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 1,
|
||||
"keys": [
|
||||
{
|
||||
"id": "generated-uuid",
|
||||
"name": "workflow key"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**Error Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 0,
|
||||
"error": "No API key found with name: nonexistent key"
|
||||
}
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
When the specified key doesn't exist:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --delete --name "nonexistent key"
|
||||
# Output: No API keys found to delete
|
||||
```
|
||||
|
||||
**JSON Error Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 0,
|
||||
"message": "No API keys found to delete"
|
||||
}
|
||||
```
|
||||
|
||||
## Complete Workflow Example
|
||||
|
||||
Here's a complete example for temporary access provisioning:
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# 1. Create temporary API key
|
||||
echo "Creating temporary API key..."
|
||||
KEY_DATA=$(unraid-api apikey --create \
|
||||
--name "temp deployment key" \
|
||||
--roles ADMIN \
|
||||
--description "Temporary key for deployment $(date)" \
|
||||
--json)
|
||||
|
||||
# 2. Extract the API key
|
||||
API_KEY=$(echo "$KEY_DATA" | jq -r '.key')
|
||||
echo "API key created successfully"
|
||||
|
||||
# 3. Use the key for operations
|
||||
echo "Configuring services..."
|
||||
curl -H "Authorization: Bearer $API_KEY" \
|
||||
-H "Content-Type: application/json" \
|
||||
     -d '{"query": "query { info { os { platform } } }"}' \
|
||||
http://localhost:3001/graphql
|
||||
|
||||
# 4. Clean up on exit (note: register this trap immediately after key creation —
#    with `set -e`, a failure in an earlier step would otherwise exit the script
#    before the trap is installed, skipping cleanup)
|
||||
trap 'echo "Cleaning up..."; unraid-api apikey --delete --name "temp deployment key"' EXIT
|
||||
|
||||
echo "Deployment completed successfully"
|
||||
```
|
||||
|
||||
## Command Reference
|
||||
|
||||
### Create Command Options
|
||||
|
||||
| Flag | Description | Example |
|
||||
| ----------------------- | ----------------------- | --------------------------------- |
|
||||
| `--name <name>` | Key name (required) | `--name "my key"` |
|
||||
| `--roles <roles>` | Comma-separated roles | `--roles ADMIN,VIEWER` |
|
||||
| `--permissions <perms>` | Resource:action pairs | `--permissions "DOCKER:READ_ANY"` |
|
||||
| `--description <desc>` | Key description | `--description "CI/CD key"` |
|
||||
| `--overwrite` | Replace existing key | `--overwrite` |
|
||||
| `--json` | Machine-readable output | `--json` |
|
||||
|
||||
### Available Roles
|
||||
|
||||
- `ADMIN` - Full system access
|
||||
- `CONNECT` - Unraid Connect features
|
||||
- `VIEWER` - Read-only access
|
||||
- `GUEST` - Limited access
|
||||
|
||||
### Available Resources and Actions
|
||||
|
||||
**Resources:** `ACTIVATION_CODE`, `API_KEY`, `ARRAY`, `CLOUD`, `CONFIG`, `CONNECT`, `CONNECT__REMOTE_ACCESS`, `CUSTOMIZATIONS`, `DASHBOARD`, `DISK`, `DISPLAY`, `DOCKER`, `FLASH`, `INFO`, `LOGS`, `ME`, `NETWORK`, `NOTIFICATIONS`, `ONLINE`, `OS`, `OWNER`, `PERMISSION`, `REGISTRATION`, `SERVERS`, `SERVICES`, `SHARE`, `VARS`, `VMS`, `WELCOME`
|
||||
|
||||
**Actions:** `CREATE_ANY`, `CREATE_OWN`, `READ_ANY`, `READ_OWN`, `UPDATE_ANY`, `UPDATE_OWN`, `DELETE_ANY`, `DELETE_OWN`
|
||||
|
||||
### Delete Command Options
|
||||
|
||||
| Flag | Description | Example |
|
||||
| --------------- | ------------------------ | ----------------- |
|
||||
| `--delete` | Enable delete mode | `--delete` |
|
||||
| `--name <name>` | Key to delete (optional) | `--name "my key"` |
|
||||
|
||||
**Note:** If `--name` is omitted, the command runs interactively.
|
||||
|
||||
## Best Practices
|
||||
|
||||
:::info[Security Best Practices]
|
||||
**Minimal Permissions**
|
||||
|
||||
- Use specific permissions instead of ADMIN role when possible
|
||||
- Example: `--permissions "DOCKER:READ_ANY"` instead of `--roles ADMIN`
|
||||
|
||||
**Key Lifecycle Management**
|
||||
|
||||
- Always clean up temporary keys after use
|
||||
- Store API keys securely (environment variables, secrets management)
|
||||
- Use descriptive names and descriptions for audit trails
|
||||
:::
|
||||
|
||||
### Error Handling
|
||||
|
||||
- Check exit codes (`$?`) after each command
|
||||
- Use `set -e` in bash scripts to fail fast
|
||||
- Implement proper cleanup with `trap`
|
||||
|
||||
### Key Naming
|
||||
|
||||
- Use descriptive names that include purpose and date
|
||||
- Names must contain only letters, numbers, and spaces
|
||||
- Unicode letters are supported
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
:::note[Common Error Messages]
|
||||
|
||||
**"API key name must contain only letters, numbers, and spaces"**
|
||||
|
||||
- **Solution:** Remove special characters like hyphens, underscores, or symbols
|
||||
|
||||
**"API key with name 'x' already exists"**
|
||||
|
||||
- **Solution:** Use `--overwrite` flag or choose a different name
|
||||
|
||||
**"Please add at least one role or permission to the key"**
|
||||
|
||||
- **Solution:** Specify either `--roles` or `--permissions` (or both)
|
||||
|
||||
:::
|
||||
|
||||
### Debug Mode
|
||||
|
||||
For troubleshooting, run with debug logging:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=debug unraid-api apikey --create --name "debug key" --roles ADMIN
|
||||
```
|
||||
@@ -13,7 +13,9 @@
|
||||
"watch": false,
|
||||
"interpreter": "/usr/local/bin/node",
|
||||
"ignore_watch": ["node_modules", "src", ".env.*", "myservers.cfg"],
|
||||
"log_file": "/var/log/graphql-api.log",
|
||||
"out_file": "/var/log/graphql-api.log",
|
||||
"error_file": "/var/log/graphql-api.log",
|
||||
"merge_logs": true,
|
||||
"kill_timeout": 10000
|
||||
}
|
||||
]
|
||||
|
||||
@@ -4,16 +4,56 @@
|
||||
|
||||
"""Directive to document required permissions for fields"""
|
||||
directive @usePermissions(
|
||||
"""The action verb required for access"""
|
||||
action: AuthActionVerb
|
||||
"""The action required for access (must be a valid AuthAction enum value)"""
|
||||
action: String
|
||||
|
||||
"""The resource required for access"""
|
||||
"""The resource required for access (must be a valid Resource enum value)"""
|
||||
resource: String
|
||||
|
||||
"""The possession type required for access"""
|
||||
possession: AuthPossession
|
||||
) on FIELD_DEFINITION
|
||||
|
||||
type ParityCheck {
|
||||
"""Date of the parity check"""
|
||||
date: DateTime
|
||||
|
||||
"""Duration of the parity check in seconds"""
|
||||
duration: Int
|
||||
|
||||
"""Speed of the parity check, in MB/s"""
|
||||
speed: String
|
||||
|
||||
"""Status of the parity check"""
|
||||
status: ParityCheckStatus!
|
||||
|
||||
"""Number of errors during the parity check"""
|
||||
errors: Int
|
||||
|
||||
"""Progress percentage of the parity check"""
|
||||
progress: Int
|
||||
|
||||
"""Whether corrections are being written to parity"""
|
||||
correcting: Boolean
|
||||
|
||||
"""Whether the parity check is paused"""
|
||||
paused: Boolean
|
||||
|
||||
"""Whether the parity check is running"""
|
||||
running: Boolean
|
||||
}
|
||||
|
||||
"""
|
||||
A date-time string at UTC, such as 2019-12-03T09:54:33Z, compliant with the date-time format.
|
||||
"""
|
||||
scalar DateTime
|
||||
|
||||
enum ParityCheckStatus {
|
||||
NEVER_RUN
|
||||
RUNNING
|
||||
PAUSED
|
||||
COMPLETED
|
||||
CANCELLED
|
||||
FAILED
|
||||
}
|
||||
|
||||
type Capacity {
|
||||
"""Free capacity"""
|
||||
free: String!
|
||||
@@ -99,6 +139,9 @@ type ArrayDisk implements Node {
|
||||
"""ata | nvme | usb | (others)"""
|
||||
transport: String
|
||||
color: ArrayDiskFsColor
|
||||
|
||||
"""Whether the disk is currently spinning"""
|
||||
isSpinning: Boolean
|
||||
}
|
||||
|
||||
interface Node {
|
||||
@@ -156,6 +199,9 @@ type UnraidArray implements Node {
|
||||
"""Parity disks in the current array"""
|
||||
parities: [ArrayDisk!]!
|
||||
|
||||
"""Current parity check status"""
|
||||
parityCheckStatus: ParityCheck!
|
||||
|
||||
"""Data disks in the current array"""
|
||||
disks: [ArrayDisk!]!
|
||||
|
||||
@@ -303,6 +349,9 @@ type Disk implements Node {
|
||||
|
||||
"""The partitions on the disk"""
|
||||
partitions: [DiskPartition!]!
|
||||
|
||||
"""Whether the disk is spinning or not"""
|
||||
isSpinning: Boolean!
|
||||
}
|
||||
|
||||
"""The type of interface the disk uses to connect to the system"""
|
||||
@@ -569,7 +618,9 @@ enum ConfigErrorState {
|
||||
|
||||
type Permission {
|
||||
resource: Resource!
|
||||
actions: [String!]!
|
||||
|
||||
"""Actions allowed on this resource"""
|
||||
actions: [AuthAction!]!
|
||||
}
|
||||
|
||||
"""Available resources for permissions"""
|
||||
@@ -605,8 +656,36 @@ enum Resource {
|
||||
WELCOME
|
||||
}
|
||||
|
||||
"""Authentication actions with possession (e.g., create:any, read:own)"""
|
||||
enum AuthAction {
|
||||
"""Create any resource"""
|
||||
CREATE_ANY
|
||||
|
||||
"""Create own resource"""
|
||||
CREATE_OWN
|
||||
|
||||
"""Read any resource"""
|
||||
READ_ANY
|
||||
|
||||
"""Read own resource"""
|
||||
READ_OWN
|
||||
|
||||
"""Update any resource"""
|
||||
UPDATE_ANY
|
||||
|
||||
"""Update own resource"""
|
||||
UPDATE_OWN
|
||||
|
||||
"""Delete any resource"""
|
||||
DELETE_ANY
|
||||
|
||||
"""Delete own resource"""
|
||||
DELETE_OWN
|
||||
}
|
||||
|
||||
type ApiKey implements Node {
|
||||
id: PrefixedID!
|
||||
key: String!
|
||||
name: String!
|
||||
description: String
|
||||
roles: [Role!]!
|
||||
@@ -616,20 +695,90 @@ type ApiKey implements Node {
|
||||
|
||||
"""Available roles for API keys and users"""
|
||||
enum Role {
|
||||
"""Full administrative access to all resources"""
|
||||
ADMIN
|
||||
USER
|
||||
|
||||
"""Internal Role for Unraid Connect"""
|
||||
CONNECT
|
||||
|
||||
"""Basic read access to user profile only"""
|
||||
GUEST
|
||||
|
||||
"""Read-only access to all resources"""
|
||||
VIEWER
|
||||
}
|
||||
|
||||
type ApiKeyWithSecret implements Node {
|
||||
type SsoSettings implements Node {
|
||||
id: PrefixedID!
|
||||
name: String!
|
||||
description: String
|
||||
roles: [Role!]!
|
||||
createdAt: String!
|
||||
permissions: [Permission!]!
|
||||
key: String!
|
||||
|
||||
"""List of configured OIDC providers"""
|
||||
oidcProviders: [OidcProvider!]!
|
||||
}
|
||||
|
||||
type UnifiedSettings implements Node & FormSchema {
|
||||
id: PrefixedID!
|
||||
|
||||
"""The data schema for the settings"""
|
||||
dataSchema: JSON!
|
||||
|
||||
"""The UI schema for the settings"""
|
||||
uiSchema: JSON!
|
||||
|
||||
"""The current values of the settings"""
|
||||
values: JSON!
|
||||
}
|
||||
|
||||
interface FormSchema {
|
||||
"""The data schema for the form"""
|
||||
dataSchema: JSON!
|
||||
|
||||
"""The UI schema for the form"""
|
||||
uiSchema: JSON!
|
||||
|
||||
"""The current values of the form"""
|
||||
values: JSON!
|
||||
}
|
||||
|
||||
"""
|
||||
The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf).
|
||||
"""
|
||||
scalar JSON @specifiedBy(url: "http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf")
|
||||
|
||||
type ApiKeyFormSettings implements Node & FormSchema {
|
||||
id: PrefixedID!
|
||||
|
||||
"""The data schema for the API key form"""
|
||||
dataSchema: JSON!
|
||||
|
||||
"""The UI schema for the API key form"""
|
||||
uiSchema: JSON!
|
||||
|
||||
"""The current values of the API key form"""
|
||||
values: JSON!
|
||||
}
|
||||
|
||||
type UpdateSettingsResponse {
|
||||
"""Whether a restart is required for the changes to take effect"""
|
||||
restartRequired: Boolean!
|
||||
|
||||
"""The updated settings values"""
|
||||
values: JSON!
|
||||
|
||||
"""Warning messages about configuration issues found during validation"""
|
||||
warnings: [String!]
|
||||
}
|
||||
|
||||
type Settings implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""A view of all settings"""
|
||||
unified: UnifiedSettings!
|
||||
|
||||
"""SSO settings"""
|
||||
sso: SsoSettings!
|
||||
|
||||
"""The API setting values"""
|
||||
api: ApiConfig!
|
||||
}
|
||||
|
||||
type RCloneDrive {
|
||||
@@ -640,11 +789,6 @@ type RCloneDrive {
|
||||
options: JSON!
|
||||
}
|
||||
|
||||
"""
|
||||
The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf).
|
||||
"""
|
||||
scalar JSON @specifiedBy(url: "http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf")
|
||||
|
||||
type RCloneBackupConfigForm {
|
||||
id: ID!
|
||||
dataSchema: JSON!
|
||||
@@ -746,7 +890,7 @@ type VmMutations {
|
||||
"""API Key related mutations"""
|
||||
type ApiKeyMutations {
|
||||
"""Create an API key"""
|
||||
create(input: CreateApiKeyInput!): ApiKeyWithSecret!
|
||||
create(input: CreateApiKeyInput!): ApiKey!
|
||||
|
||||
"""Add a role to an API key"""
|
||||
addRole(input: AddRoleForApiKeyInput!): Boolean!
|
||||
@@ -758,7 +902,7 @@ type ApiKeyMutations {
|
||||
delete(input: DeleteApiKeyInput!): Boolean!
|
||||
|
||||
"""Update an API key"""
|
||||
update(input: UpdateApiKeyInput!): ApiKeyWithSecret!
|
||||
update(input: UpdateApiKeyInput!): ApiKey!
|
||||
}
|
||||
|
||||
input CreateApiKeyInput {
|
||||
@@ -775,7 +919,7 @@ input CreateApiKeyInput {
|
||||
|
||||
input AddPermissionInput {
|
||||
resource: Resource!
|
||||
actions: [String!]!
|
||||
actions: [AuthAction!]!
|
||||
}
|
||||
|
||||
input AddRoleForApiKeyInput {
|
||||
@@ -836,40 +980,6 @@ input DeleteRCloneRemoteInput {
|
||||
name: String!
|
||||
}
|
||||
|
||||
type ParityCheck {
|
||||
"""Date of the parity check"""
|
||||
date: DateTime
|
||||
|
||||
"""Duration of the parity check in seconds"""
|
||||
duration: Int
|
||||
|
||||
"""Speed of the parity check, in MB/s"""
|
||||
speed: String
|
||||
|
||||
"""Status of the parity check"""
|
||||
status: String
|
||||
|
||||
"""Number of errors during the parity check"""
|
||||
errors: Int
|
||||
|
||||
"""Progress percentage of the parity check"""
|
||||
progress: Int
|
||||
|
||||
"""Whether corrections are being written to parity"""
|
||||
correcting: Boolean
|
||||
|
||||
"""Whether the parity check is paused"""
|
||||
paused: Boolean
|
||||
|
||||
"""Whether the parity check is running"""
|
||||
running: Boolean
|
||||
}
|
||||
|
||||
"""
|
||||
A date-time string at UTC, such as 2019-12-03T09:54:33Z, compliant with the date-time format.
|
||||
"""
|
||||
scalar DateTime
|
||||
|
||||
type Config implements Node {
|
||||
id: PrefixedID!
|
||||
valid: Boolean
|
||||
@@ -940,6 +1050,19 @@ enum ThemeName {
|
||||
white
|
||||
}
|
||||
|
||||
type ExplicitStatusItem {
|
||||
name: String!
|
||||
updateStatus: UpdateStatus!
|
||||
}
|
||||
|
||||
"""Update status of a container."""
|
||||
enum UpdateStatus {
|
||||
UP_TO_DATE
|
||||
UPDATE_AVAILABLE
|
||||
REBUILD_READY
|
||||
UNKNOWN
|
||||
}
|
||||
|
||||
type ContainerPort {
|
||||
ip: String
|
||||
privatePort: Port
|
||||
@@ -970,8 +1093,8 @@ type DockerContainer implements Node {
|
||||
created: Int!
|
||||
ports: [ContainerPort!]!
|
||||
|
||||
"""Total size of all the files in the container"""
|
||||
sizeRootFs: Int
|
||||
"""Total size of all files in the container (in bytes)"""
|
||||
sizeRootFs: BigInt
|
||||
labels: JSON
|
||||
state: ContainerState!
|
||||
status: String!
|
||||
@@ -979,6 +1102,8 @@ type DockerContainer implements Node {
|
||||
networkSettings: JSON
|
||||
mounts: [JSON!]
|
||||
autoStart: Boolean!
|
||||
isUpdateAvailable: Boolean
|
||||
isRebuildReady: Boolean
|
||||
}
|
||||
|
||||
enum ContainerState {
|
||||
@@ -1009,6 +1134,7 @@ type Docker implements Node {
|
||||
containers(skipCache: Boolean! = false): [DockerContainer!]!
|
||||
networks(skipCache: Boolean! = false): [DockerNetwork!]!
|
||||
organizer: ResolvedOrganizerV1!
|
||||
containerUpdateStatuses: [ExplicitStatusItem!]!
|
||||
}
|
||||
|
||||
type ResolvedOrganizerView {
|
||||
@@ -1257,6 +1383,12 @@ type CpuLoad {
|
||||
|
||||
"""The percentage of time the CPU spent servicing hardware interrupts."""
|
||||
percentIrq: Float!
|
||||
|
||||
"""The percentage of time the CPU spent running virtual machines (guest)."""
|
||||
percentGuest: Float!
|
||||
|
||||
"""The percentage of CPU time stolen by the hypervisor."""
|
||||
percentSteal: Float!
|
||||
}
|
||||
|
||||
type CpuUtilization implements Node {
|
||||
@@ -1501,98 +1633,51 @@ type InfoBaseboard implements Node {
|
||||
memSlots: Float
|
||||
}
|
||||
|
||||
type InfoVersions implements Node {
|
||||
id: PrefixedID!
|
||||
type CoreVersions {
|
||||
"""Unraid version"""
|
||||
unraid: String
|
||||
|
||||
"""Unraid API version"""
|
||||
api: String
|
||||
|
||||
"""Kernel version"""
|
||||
kernel: String
|
||||
}
|
||||
|
||||
type PackageVersions {
|
||||
"""OpenSSL version"""
|
||||
openssl: String
|
||||
|
||||
"""System OpenSSL version"""
|
||||
systemOpenssl: String
|
||||
|
||||
"""Node.js version"""
|
||||
node: String
|
||||
|
||||
"""V8 engine version"""
|
||||
v8: String
|
||||
|
||||
"""npm version"""
|
||||
npm: String
|
||||
|
||||
"""Yarn version"""
|
||||
yarn: String
|
||||
|
||||
"""pm2 version"""
|
||||
pm2: String
|
||||
|
||||
"""Gulp version"""
|
||||
gulp: String
|
||||
|
||||
"""Grunt version"""
|
||||
grunt: String
|
||||
|
||||
"""Git version"""
|
||||
git: String
|
||||
|
||||
"""tsc version"""
|
||||
tsc: String
|
||||
|
||||
"""MySQL version"""
|
||||
mysql: String
|
||||
|
||||
"""Redis version"""
|
||||
redis: String
|
||||
|
||||
"""MongoDB version"""
|
||||
mongodb: String
|
||||
|
||||
"""Apache version"""
|
||||
apache: String
|
||||
|
||||
"""nginx version"""
|
||||
nginx: String
|
||||
|
||||
"""PHP version"""
|
||||
php: String
|
||||
|
||||
"""Postfix version"""
|
||||
postfix: String
|
||||
|
||||
"""PostgreSQL version"""
|
||||
postgresql: String
|
||||
|
||||
"""Perl version"""
|
||||
perl: String
|
||||
|
||||
"""Python version"""
|
||||
python: String
|
||||
|
||||
"""Python3 version"""
|
||||
python3: String
|
||||
|
||||
"""pip version"""
|
||||
pip: String
|
||||
|
||||
"""pip3 version"""
|
||||
pip3: String
|
||||
|
||||
"""Java version"""
|
||||
java: String
|
||||
|
||||
"""gcc version"""
|
||||
gcc: String
|
||||
|
||||
"""VirtualBox version"""
|
||||
virtualbox: String
|
||||
|
||||
"""Docker version"""
|
||||
docker: String
|
||||
}
|
||||
|
||||
"""Unraid version"""
|
||||
unraid: String
|
||||
type InfoVersions implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""Core system versions"""
|
||||
core: CoreVersions!
|
||||
|
||||
"""Software package versions"""
|
||||
packages: PackageVersions
|
||||
}
|
||||
|
||||
type Info implements Node {
|
||||
@@ -1741,6 +1826,8 @@ type Server implements Node {
|
||||
guid: String!
|
||||
apikey: String!
|
||||
name: String!
|
||||
|
||||
"""Whether this server is online or offline"""
|
||||
status: ServerStatus!
|
||||
wanip: String!
|
||||
lanip: String!
|
||||
@@ -1762,50 +1849,6 @@ type ApiConfig {
|
||||
plugins: [String!]!
|
||||
}
|
||||
|
||||
type SsoSettings implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""List of configured OIDC providers"""
|
||||
oidcProviders: [OidcProvider!]!
|
||||
}
|
||||
|
||||
type UnifiedSettings implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""The data schema for the settings"""
|
||||
dataSchema: JSON!
|
||||
|
||||
"""The UI schema for the settings"""
|
||||
uiSchema: JSON!
|
||||
|
||||
"""The current values of the settings"""
|
||||
values: JSON!
|
||||
}
|
||||
|
||||
type UpdateSettingsResponse {
|
||||
"""Whether a restart is required for the changes to take effect"""
|
||||
restartRequired: Boolean!
|
||||
|
||||
"""The updated settings values"""
|
||||
values: JSON!
|
||||
|
||||
"""Warning messages about configuration issues found during validation"""
|
||||
warnings: [String!]
|
||||
}
|
||||
|
||||
type Settings implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""A view of all settings"""
|
||||
unified: UnifiedSettings!
|
||||
|
||||
"""SSO settings"""
|
||||
sso: SsoSettings!
|
||||
|
||||
"""The API setting values"""
|
||||
api: ApiConfig!
|
||||
}
|
||||
|
||||
type OidcAuthorizationRule {
|
||||
"""The claim to check (e.g., email, sub, groups, hd)"""
|
||||
claim: String!
|
||||
@@ -1841,7 +1884,7 @@ type OidcProvider {
|
||||
"""
|
||||
OIDC issuer URL (e.g., https://accounts.google.com). Required for auto-discovery via /.well-known/openid-configuration
|
||||
"""
|
||||
issuer: String!
|
||||
issuer: String
|
||||
|
||||
"""
|
||||
OAuth2 authorization endpoint URL. If omitted, will be auto-discovered from issuer/.well-known/openid-configuration
|
||||
@@ -1894,6 +1937,16 @@ enum AuthorizationRuleMode {
|
||||
AND
|
||||
}
|
||||
|
||||
type OidcConfiguration {
|
||||
"""List of configured OIDC providers"""
|
||||
providers: [OidcProvider!]!
|
||||
|
||||
"""
|
||||
Default allowed redirect origins that apply to all OIDC providers (e.g., Tailscale domains)
|
||||
"""
|
||||
defaultAllowedOrigins: [String!]
|
||||
}
|
||||
|
||||
type OidcSessionValidation {
|
||||
valid: Boolean!
|
||||
username: String
|
||||
@@ -2278,10 +2331,22 @@ type Query {
|
||||
|
||||
"""All possible permissions for API keys"""
|
||||
apiKeyPossiblePermissions: [Permission!]!
|
||||
|
||||
"""Get the actual permissions that would be granted by a set of roles"""
|
||||
getPermissionsForRoles(roles: [Role!]!): [Permission!]!
|
||||
|
||||
"""
|
||||
Preview the effective permissions for a combination of roles and explicit permissions
|
||||
"""
|
||||
previewEffectivePermissions(roles: [Role!], permissions: [AddPermissionInput!]): [Permission!]!
|
||||
|
||||
"""Get all available authentication actions with possession"""
|
||||
getAvailableAuthActions: [AuthAction!]!
|
||||
|
||||
"""Get JSON Schema for API key creation form"""
|
||||
getApiKeyCreationFormSchema: ApiKeyFormSettings!
|
||||
config: Config!
|
||||
flash: Flash!
|
||||
logFiles: [LogFile!]!
|
||||
logFile(path: String!, lines: Int, startLine: Int): LogFileContent!
|
||||
me: UserAccount!
|
||||
|
||||
"""Get all notifications"""
|
||||
@@ -2308,6 +2373,8 @@ type Query {
|
||||
disk(id: PrefixedID!): Disk!
|
||||
rclone: RCloneBackupSettings!
|
||||
info: Info!
|
||||
logFiles: [LogFile!]!
|
||||
logFile(path: String!, lines: Int, startLine: Int): LogFileContent!
|
||||
settings: Settings!
|
||||
isSSOEnabled: Boolean!
|
||||
|
||||
@@ -2320,6 +2387,9 @@ type Query {
|
||||
"""Get a specific OIDC provider by ID"""
|
||||
oidcProvider(id: PrefixedID!): OidcProvider
|
||||
|
||||
"""Get the full OIDC configuration (admin only)"""
|
||||
oidcConfiguration: OidcConfiguration!
|
||||
|
||||
"""Validate an OIDC session token (internal use for CLI validation)"""
|
||||
validateOidcSession(token: String!): OidcSessionValidation!
|
||||
metrics: Metrics!
|
||||
@@ -2365,6 +2435,7 @@ type Mutation {
|
||||
setDockerFolderChildren(folderId: String, childrenIds: [String!]!): ResolvedOrganizerV1!
|
||||
deleteDockerEntries(entryIds: [String!]!): ResolvedOrganizerV1!
|
||||
moveDockerEntriesToFolder(sourceEntryIds: [String!]!, destinationFolderId: String!): ResolvedOrganizerV1!
|
||||
refreshDockerDigests: Boolean!
|
||||
|
||||
"""Initiates a flash drive backup using a configured remote."""
|
||||
initiateFlashBackup(input: InitiateFlashBackupInput!): FlashBackupStatus!
|
||||
@@ -2563,29 +2634,14 @@ input AccessUrlInput {
|
||||
}
|
||||
|
||||
type Subscription {
|
||||
logFile(path: String!): LogFileContent!
|
||||
notificationAdded: Notification!
|
||||
notificationsOverview: NotificationOverview!
|
||||
ownerSubscription: Owner!
|
||||
serversSubscription: Server!
|
||||
parityHistorySubscription: ParityCheck!
|
||||
arraySubscription: UnraidArray!
|
||||
logFile(path: String!): LogFileContent!
|
||||
systemMetricsCpu: CpuUtilization!
|
||||
systemMetricsMemory: MemoryUtilization!
|
||||
upsUpdates: UPSDevice!
|
||||
}
|
||||
|
||||
"""Available authentication action verbs"""
|
||||
enum AuthActionVerb {
|
||||
CREATE
|
||||
UPDATE
|
||||
DELETE
|
||||
READ
|
||||
}
|
||||
|
||||
"""Available authentication possession types"""
|
||||
enum AuthPossession {
|
||||
ANY
|
||||
OWN
|
||||
OWN_ANY
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@unraid/api",
|
||||
"version": "4.14.0",
|
||||
"version": "4.22.2",
|
||||
"main": "src/cli/index.ts",
|
||||
"type": "module",
|
||||
"corepack": {
|
||||
@@ -10,7 +10,7 @@
|
||||
"author": "Lime Technology, Inc. <unraid.net>",
|
||||
"license": "GPL-2.0-or-later",
|
||||
"engines": {
|
||||
"pnpm": "10.14.0"
|
||||
"pnpm": "10.15.0"
|
||||
},
|
||||
"scripts": {
|
||||
"// Development": "",
|
||||
@@ -51,12 +51,12 @@
|
||||
"unraid-api": "dist/cli.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@apollo/client": "3.13.9",
|
||||
"@apollo/client": "3.14.0",
|
||||
"@apollo/server": "4.12.2",
|
||||
"@as-integrations/fastify": "2.1.1",
|
||||
"@fastify/cookie": "11.0.2",
|
||||
"@fastify/helmet": "13.0.1",
|
||||
"@graphql-codegen/client-preset": "4.8.3",
|
||||
"@graphql-codegen/client-preset": "5.0.0",
|
||||
"@graphql-tools/load-files": "7.0.1",
|
||||
"@graphql-tools/merge": "9.1.1",
|
||||
"@graphql-tools/schema": "10.0.25",
|
||||
@@ -82,9 +82,9 @@
|
||||
"atomically": "2.0.3",
|
||||
"bycontract": "2.0.11",
|
||||
"bytes": "3.1.2",
|
||||
"cache-manager": "7.1.1",
|
||||
"cache-manager": "7.2.0",
|
||||
"cacheable-lookup": "7.0.0",
|
||||
"camelcase-keys": "9.1.3",
|
||||
"camelcase-keys": "10.0.0",
|
||||
"casbin": "5.38.0",
|
||||
"change-case": "5.4.4",
|
||||
"chokidar": "4.0.3",
|
||||
@@ -94,15 +94,16 @@
|
||||
"command-exists": "1.2.9",
|
||||
"convert": "5.12.0",
|
||||
"cookie": "1.0.2",
|
||||
"cron": "4.3.3",
|
||||
"cron": "4.3.0",
|
||||
"cross-fetch": "4.1.0",
|
||||
"diff": "8.0.2",
|
||||
"dockerode": "4.0.7",
|
||||
"dotenv": "17.2.1",
|
||||
"escape-html": "1.0.3",
|
||||
"execa": "9.6.0",
|
||||
"exit-hook": "4.0.0",
|
||||
"fastify": "5.5.0",
|
||||
"filenamify": "6.0.0",
|
||||
"filenamify": "7.0.0",
|
||||
"fs-extra": "11.3.1",
|
||||
"glob": "11.0.3",
|
||||
"global-agent": "3.0.0",
|
||||
@@ -115,22 +116,22 @@
|
||||
"graphql-ws": "6.0.6",
|
||||
"ini": "5.0.0",
|
||||
"ip": "2.0.1",
|
||||
"jose": "6.0.12",
|
||||
"jose": "6.0.13",
|
||||
"json-bigint-patch": "0.0.8",
|
||||
"lodash-es": "4.17.21",
|
||||
"multi-ini": "2.3.2",
|
||||
"mustache": "4.2.0",
|
||||
"nest-authz": "2.17.0",
|
||||
"nest-commander": "3.18.0",
|
||||
"nest-commander": "3.19.0",
|
||||
"nestjs-pino": "4.4.0",
|
||||
"node-cache": "5.1.2",
|
||||
"node-window-polyfill": "1.0.4",
|
||||
"openid-client": "6.6.2",
|
||||
"p-retry": "6.2.1",
|
||||
"openid-client": "6.6.4",
|
||||
"p-retry": "7.0.0",
|
||||
"passport-custom": "1.1.1",
|
||||
"passport-http-header-strategy": "1.1.0",
|
||||
"path-type": "6.0.0",
|
||||
"pino": "9.8.0",
|
||||
"pino": "9.9.0",
|
||||
"pino-http": "10.5.0",
|
||||
"pino-pretty": "13.1.1",
|
||||
"pm2": "6.0.8",
|
||||
@@ -138,9 +139,9 @@
|
||||
"rxjs": "7.8.2",
|
||||
"semver": "7.7.2",
|
||||
"strftime": "0.10.3",
|
||||
"systeminformation": "5.27.7",
|
||||
"undici": "7.13.0",
|
||||
"uuid": "11.1.0",
|
||||
"systeminformation": "5.27.8",
|
||||
"undici": "7.15.0",
|
||||
"uuid": "13.0.0",
|
||||
"ws": "8.18.3",
|
||||
"zen-observable-ts": "1.1.0",
|
||||
"zod": "3.25.76"
|
||||
@@ -154,27 +155,27 @@
|
||||
}
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "9.33.0",
|
||||
"@graphql-codegen/add": "5.0.3",
|
||||
"@graphql-codegen/cli": "5.0.7",
|
||||
"@graphql-codegen/fragment-matcher": "5.1.0",
|
||||
"@eslint/js": "9.34.0",
|
||||
"@graphql-codegen/add": "6.0.0",
|
||||
"@graphql-codegen/cli": "6.0.0",
|
||||
"@graphql-codegen/fragment-matcher": "6.0.0",
|
||||
"@graphql-codegen/import-types-preset": "3.0.1",
|
||||
"@graphql-codegen/typed-document-node": "5.1.2",
|
||||
"@graphql-codegen/typescript": "4.1.6",
|
||||
"@graphql-codegen/typescript-operations": "4.6.1",
|
||||
"@graphql-codegen/typescript-resolvers": "4.5.1",
|
||||
"@graphql-codegen/typed-document-node": "6.0.0",
|
||||
"@graphql-codegen/typescript": "5.0.0",
|
||||
"@graphql-codegen/typescript-operations": "5.0.0",
|
||||
"@graphql-codegen/typescript-resolvers": "5.0.0",
|
||||
"@graphql-typed-document-node/core": "3.2.0",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.6.1",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.6.3",
|
||||
"@nestjs/testing": "11.1.6",
|
||||
"@originjs/vite-plugin-commonjs": "1.0.3",
|
||||
"@rollup/plugin-node-resolve": "16.0.1",
|
||||
"@swc/core": "1.13.3",
|
||||
"@swc/core": "1.13.5",
|
||||
"@types/async-exit-hook": "2.0.2",
|
||||
"@types/bytes": "3.1.5",
|
||||
"@types/cli-table": "0.3.4",
|
||||
"@types/command-exists": "1.2.3",
|
||||
"@types/cors": "2.8.19",
|
||||
"@types/dockerode": "3.3.42",
|
||||
"@types/dockerode": "3.3.43",
|
||||
"@types/graphql-fields": "1.3.9",
|
||||
"@types/graphql-type-uuid": "0.2.6",
|
||||
"@types/ini": "4.1.1",
|
||||
@@ -182,7 +183,7 @@
|
||||
"@types/lodash": "4.17.20",
|
||||
"@types/lodash-es": "4.17.12",
|
||||
"@types/mustache": "4.2.6",
|
||||
"@types/node": "22.17.1",
|
||||
"@types/node": "22.18.0",
|
||||
"@types/pify": "6.1.0",
|
||||
"@types/semver": "7.7.0",
|
||||
"@types/sendmail": "1.4.7",
|
||||
@@ -191,28 +192,28 @@
|
||||
"@types/supertest": "6.0.3",
|
||||
"@types/uuid": "10.0.0",
|
||||
"@types/ws": "8.18.1",
|
||||
"@types/wtfnode": "0.7.3",
|
||||
"@types/wtfnode": "0.10.0",
|
||||
"@vitest/coverage-v8": "3.2.4",
|
||||
"@vitest/ui": "3.2.4",
|
||||
"eslint": "9.33.0",
|
||||
"eslint": "9.34.0",
|
||||
"eslint-plugin-import": "2.32.0",
|
||||
"eslint-plugin-no-relative-import-paths": "1.6.1",
|
||||
"eslint-plugin-prettier": "5.5.4",
|
||||
"jiti": "2.5.1",
|
||||
"nodemon": "3.1.10",
|
||||
"prettier": "3.6.2",
|
||||
"rollup-plugin-node-externals": "8.0.1",
|
||||
"rollup-plugin-node-externals": "8.1.0",
|
||||
"supertest": "7.1.4",
|
||||
"tsx": "4.20.3",
|
||||
"type-fest": "4.41.0",
|
||||
"tsx": "4.20.5",
|
||||
"type-fest": "5.0.0",
|
||||
"typescript": "5.9.2",
|
||||
"typescript-eslint": "8.39.1",
|
||||
"unplugin-swc": "1.5.5",
|
||||
"vite": "7.1.1",
|
||||
"typescript-eslint": "8.41.0",
|
||||
"unplugin-swc": "1.5.7",
|
||||
"vite": "7.1.3",
|
||||
"vite-plugin-node": "7.0.0",
|
||||
"vite-tsconfig-paths": "5.1.4",
|
||||
"vitest": "3.2.4",
|
||||
"zx": "8.8.0"
|
||||
"zx": "8.8.1"
|
||||
},
|
||||
"overrides": {
|
||||
"eslint": {
|
||||
@@ -227,5 +228,5 @@
|
||||
}
|
||||
},
|
||||
"private": true,
|
||||
"packageManager": "pnpm@10.14.0"
|
||||
"packageManager": "pnpm@10.15.0"
|
||||
}
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
import { expect, test } from 'vitest';
|
||||
import { expect, test, vi } from 'vitest';
|
||||
|
||||
import { store } from '@app/store/index.js';
|
||||
import { FileLoadStatus, StateFileKey } from '@app/store/types.js';
|
||||
|
||||
import '@app/core/utils/misc/get-key-file.js';
|
||||
import '@app/store/modules/emhttp.js';
|
||||
|
||||
vi.mock('fs/promises');
|
||||
|
||||
test('Before loading key returns null', async () => {
|
||||
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
|
||||
const { status } = store.getState().registration;
|
||||
@@ -48,21 +49,70 @@ test('Returns empty key if key location is empty', async () => {
|
||||
await expect(getKeyFile()).resolves.toBe('');
|
||||
});
|
||||
|
||||
test(
|
||||
'Returns decoded key file if key location exists',
|
||||
async () => {
|
||||
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
|
||||
const { loadStateFiles } = await import('@app/store/modules/emhttp.js');
|
||||
const { loadRegistrationKey } = await import('@app/store/modules/registration.js');
|
||||
// Load state files into store
|
||||
await store.dispatch(loadStateFiles());
|
||||
await store.dispatch(loadRegistrationKey());
|
||||
// Check if store has state files loaded
|
||||
const { status } = store.getState().registration;
|
||||
expect(status).toBe(FileLoadStatus.LOADED);
|
||||
await expect(getKeyFile()).resolves.toMatchInlineSnapshot(
|
||||
'"hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w"'
|
||||
);
|
||||
},
|
||||
{ timeout: 10000 }
|
||||
);
|
||||
test('Returns empty string when key file does not exist (ENOENT)', async () => {
|
||||
const { readFile } = await import('fs/promises');
|
||||
|
||||
// Mock readFile to throw ENOENT error
|
||||
const readFileMock = vi.mocked(readFile);
|
||||
readFileMock.mockRejectedValueOnce(
|
||||
Object.assign(new Error('ENOENT: no such file or directory'), { code: 'ENOENT' })
|
||||
);
|
||||
|
||||
// Clear the module cache and re-import to get fresh module with mock
|
||||
vi.resetModules();
|
||||
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
|
||||
const { updateEmhttpState } = await import('@app/store/modules/emhttp.js');
|
||||
const { store: freshStore } = await import('@app/store/index.js');
|
||||
|
||||
// Set key file location to a non-existent file
|
||||
freshStore.dispatch(
|
||||
updateEmhttpState({
|
||||
field: StateFileKey.var,
|
||||
state: {
|
||||
regFile: '/boot/config/Pro.key',
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
// Should return empty string when file doesn't exist
|
||||
await expect(getKeyFile()).resolves.toBe('');
|
||||
|
||||
// Clear mock
|
||||
readFileMock.mockReset();
|
||||
vi.resetModules();
|
||||
});
|
||||
|
||||
test('Returns decoded key file if key location exists', async () => {
|
||||
const { readFile } = await import('fs/promises');
|
||||
|
||||
// Mock a valid key file content
|
||||
const mockKeyContent =
|
||||
'hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w==';
|
||||
const binaryContent = Buffer.from(mockKeyContent, 'base64').toString('binary');
|
||||
|
||||
const readFileMock = vi.mocked(readFile);
|
||||
readFileMock.mockResolvedValue(binaryContent);
|
||||
|
||||
// Clear the module cache and re-import to get fresh module with mock
|
||||
vi.resetModules();
|
||||
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
|
||||
const { loadStateFiles } = await import('@app/store/modules/emhttp.js');
|
||||
const { loadRegistrationKey } = await import('@app/store/modules/registration.js');
|
||||
const { store: freshStore } = await import('@app/store/index.js');
|
||||
|
||||
// Load state files into store
|
||||
await freshStore.dispatch(loadStateFiles());
|
||||
await freshStore.dispatch(loadRegistrationKey());
|
||||
// Check if store has state files loaded
|
||||
const { status } = freshStore.getState().registration;
|
||||
expect(status).toBe(FileLoadStatus.LOADED);
|
||||
|
||||
const result = await getKeyFile();
|
||||
expect(result).toBe(
|
||||
'hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w'
|
||||
);
|
||||
|
||||
// Clear mock
|
||||
readFileMock.mockReset();
|
||||
vi.resetModules();
|
||||
}, 10000);
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import { existsSync } from 'node:fs';
|
||||
import { homedir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
import { execa } from 'execa';
|
||||
import pm2 from 'pm2';
|
||||
import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
import { afterAll, afterEach, beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
import { isUnraidApiRunning } from '@app/core/utils/pm2/unraid-api-running.js';
|
||||
|
||||
@@ -17,11 +18,6 @@ const TEST_PROCESS_NAME = 'test-unraid-api';
|
||||
// Shared PM2 connection state
|
||||
let pm2Connected = false;
|
||||
|
||||
// Helper function to run CLI command (assumes CLI is built)
|
||||
async function runCliCommand(command: string, options: any = {}) {
|
||||
return await execa('node', [CLI_PATH, command], options);
|
||||
}
|
||||
|
||||
// Helper to ensure PM2 connection is established
|
||||
async function ensurePM2Connection() {
|
||||
if (pm2Connected) return;
|
||||
@@ -57,7 +53,7 @@ async function deleteTestProcesses() {
|
||||
}
|
||||
|
||||
const processName = processNames[deletedCount];
|
||||
pm2.delete(processName, (deleteErr) => {
|
||||
pm2.delete(processName, () => {
|
||||
// Ignore errors, process might not exist
|
||||
deletedCount++;
|
||||
deleteNext();
|
||||
@@ -92,7 +88,7 @@ async function cleanupAllPM2Processes() {
|
||||
}
|
||||
|
||||
// Kill the daemon to ensure fresh state
|
||||
pm2.killDaemon((killErr) => {
|
||||
pm2.killDaemon(() => {
|
||||
pm2.disconnect();
|
||||
pm2Connected = false;
|
||||
// Small delay to let PM2 fully shutdown
|
||||
@@ -104,6 +100,9 @@ async function cleanupAllPM2Processes() {
|
||||
|
||||
describe.skipIf(!!process.env.CI)('PM2 integration tests', () => {
|
||||
beforeAll(async () => {
|
||||
// Set PM2_HOME to use home directory for testing (not /var/log)
|
||||
process.env.PM2_HOME = join(homedir(), '.pm2');
|
||||
|
||||
// Build the CLI if it doesn't exist (only for CLI tests)
|
||||
if (!existsSync(CLI_PATH)) {
|
||||
console.log('Building CLI for integration tests...');
|
||||
@@ -198,6 +197,13 @@ describe.skipIf(!!process.env.CI)('PM2 integration tests', () => {
|
||||
}, 30000);
|
||||
|
||||
it('should handle PM2 connection errors gracefully', async () => {
|
||||
// Disconnect PM2 first to ensure we're testing fresh connection
|
||||
await new Promise<void>((resolve) => {
|
||||
pm2.disconnect();
|
||||
pm2Connected = false;
|
||||
setTimeout(resolve, 100);
|
||||
});
|
||||
|
||||
// Set an invalid PM2_HOME to force connection failure
|
||||
const originalPM2Home = process.env.PM2_HOME;
|
||||
process.env.PM2_HOME = '/invalid/path/that/does/not/exist';
|
||||
|
||||
@@ -95,6 +95,48 @@ test('Returns both disk and user shares', async () => {
|
||||
"type": "user",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data with periods",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system.with.periods",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system.with.periods",
|
||||
"nameOrig": "system.with.periods",
|
||||
"nfs": {},
|
||||
"size": 0,
|
||||
"smb": {},
|
||||
"splitLevel": "1",
|
||||
"type": "user",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data with 🚀",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system.with.🚀",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system.with.🚀",
|
||||
"nameOrig": "system.with.🚀",
|
||||
"nfs": {},
|
||||
"size": 0,
|
||||
"smb": {},
|
||||
"splitLevel": "1",
|
||||
"type": "user",
|
||||
"used": 33619300,
|
||||
},
|
||||
],
|
||||
}
|
||||
`);
|
||||
@@ -211,6 +253,48 @@ test('Returns shares by type', async () => {
|
||||
"type": "user",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data with periods",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system.with.periods",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system.with.periods",
|
||||
"nameOrig": "system.with.periods",
|
||||
"nfs": {},
|
||||
"size": 0,
|
||||
"smb": {},
|
||||
"splitLevel": "1",
|
||||
"type": "user",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data with 🚀",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system.with.🚀",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system.with.🚀",
|
||||
"nameOrig": "system.with.🚀",
|
||||
"nfs": {},
|
||||
"size": 0,
|
||||
"smb": {},
|
||||
"splitLevel": "1",
|
||||
"type": "user",
|
||||
"used": 33619300,
|
||||
},
|
||||
]
|
||||
`);
|
||||
expect(getShares('disk')).toMatchInlineSnapshot('null');
|
||||
|
||||
@@ -12,7 +12,22 @@ import {
|
||||
UpdateRCloneRemoteDto,
|
||||
} from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
|
||||
|
||||
vi.mock('got');
|
||||
vi.mock('got', () => {
|
||||
const mockPost = vi.fn();
|
||||
const gotMock = {
|
||||
post: mockPost,
|
||||
};
|
||||
return {
|
||||
default: gotMock,
|
||||
HTTPError: class HTTPError extends Error {
|
||||
response?: any;
|
||||
constructor(response?: any) {
|
||||
super('HTTP Error');
|
||||
this.response = response;
|
||||
}
|
||||
},
|
||||
};
|
||||
});
|
||||
vi.mock('execa');
|
||||
vi.mock('p-retry');
|
||||
vi.mock('node:fs', () => ({
|
||||
@@ -60,7 +75,7 @@ vi.mock('@nestjs/common', async (importOriginal) => {
|
||||
|
||||
describe('RCloneApiService', () => {
|
||||
let service: RCloneApiService;
|
||||
let mockGot: any;
|
||||
let mockGotPost: any;
|
||||
let mockExeca: any;
|
||||
let mockPRetry: any;
|
||||
let mockExistsSync: any;
|
||||
@@ -68,19 +83,19 @@ describe('RCloneApiService', () => {
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
const { default: got } = await import('got');
|
||||
const got = await import('got');
|
||||
const { execa } = await import('execa');
|
||||
const pRetry = await import('p-retry');
|
||||
const { existsSync } = await import('node:fs');
|
||||
const { fileExists } = await import('@app/core/utils/files/file-exists.js');
|
||||
|
||||
mockGot = vi.mocked(got);
|
||||
mockGotPost = vi.mocked(got.default.post);
|
||||
mockExeca = vi.mocked(execa);
|
||||
mockPRetry = vi.mocked(pRetry.default);
|
||||
mockExistsSync = vi.mocked(existsSync);
|
||||
|
||||
// Mock successful RClone API response for socket check
|
||||
mockGot.post = vi.fn().mockResolvedValue({ body: { pid: 12345 } });
|
||||
mockGotPost.mockResolvedValue({ body: { pid: 12345 } });
|
||||
|
||||
// Mock RClone binary exists check
|
||||
vi.mocked(fileExists).mockResolvedValue(true);
|
||||
@@ -97,10 +112,10 @@ describe('RCloneApiService', () => {
|
||||
mockPRetry.mockResolvedValue(undefined);
|
||||
|
||||
service = new RCloneApiService();
|
||||
await service.onModuleInit();
|
||||
await service.onApplicationBootstrap();
|
||||
|
||||
// Reset the mock after initialization to prepare for test-specific responses
|
||||
mockGot.post.mockClear();
|
||||
mockGotPost.mockClear();
|
||||
});
|
||||
|
||||
describe('getProviders', () => {
|
||||
@@ -109,15 +124,15 @@ describe('RCloneApiService', () => {
|
||||
{ name: 'aws', prefix: 's3', description: 'Amazon S3' },
|
||||
{ name: 'google', prefix: 'drive', description: 'Google Drive' },
|
||||
];
|
||||
mockGot.post.mockResolvedValue({
|
||||
mockGotPost.mockResolvedValue({
|
||||
body: { providers: mockProviders },
|
||||
});
|
||||
|
||||
const result = await service.getProviders();
|
||||
|
||||
expect(result).toEqual(mockProviders);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/providers',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/config\/providers$/),
|
||||
expect.objectContaining({
|
||||
json: {},
|
||||
responseType: 'json',
|
||||
@@ -130,7 +145,7 @@ describe('RCloneApiService', () => {
|
||||
});
|
||||
|
||||
it('should return empty array when no providers', async () => {
|
||||
mockGot.post.mockResolvedValue({ body: {} });
|
||||
mockGotPost.mockResolvedValue({ body: {} });
|
||||
|
||||
const result = await service.getProviders();
|
||||
|
||||
@@ -141,15 +156,15 @@ describe('RCloneApiService', () => {
|
||||
describe('listRemotes', () => {
|
||||
it('should return list of remotes', async () => {
|
||||
const mockRemotes = ['backup-s3', 'drive-storage'];
|
||||
mockGot.post.mockResolvedValue({
|
||||
mockGotPost.mockResolvedValue({
|
||||
body: { remotes: mockRemotes },
|
||||
});
|
||||
|
||||
const result = await service.listRemotes();
|
||||
|
||||
expect(result).toEqual(mockRemotes);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/listremotes',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/config\/listremotes$/),
|
||||
expect.objectContaining({
|
||||
json: {},
|
||||
responseType: 'json',
|
||||
@@ -162,7 +177,7 @@ describe('RCloneApiService', () => {
|
||||
});
|
||||
|
||||
it('should return empty array when no remotes', async () => {
|
||||
mockGot.post.mockResolvedValue({ body: {} });
|
||||
mockGotPost.mockResolvedValue({ body: {} });
|
||||
|
||||
const result = await service.listRemotes();
|
||||
|
||||
@@ -174,13 +189,13 @@ describe('RCloneApiService', () => {
|
||||
it('should return remote details', async () => {
|
||||
const input: GetRCloneRemoteDetailsDto = { name: 'test-remote' };
|
||||
const mockConfig = { type: 's3', provider: 'AWS' };
|
||||
mockGot.post.mockResolvedValue({ body: mockConfig });
|
||||
mockGotPost.mockResolvedValue({ body: mockConfig });
|
||||
|
||||
const result = await service.getRemoteDetails(input);
|
||||
|
||||
expect(result).toEqual(mockConfig);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/get',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/config\/get$/),
|
||||
expect.objectContaining({
|
||||
json: { name: 'test-remote' },
|
||||
responseType: 'json',
|
||||
@@ -197,7 +212,7 @@ describe('RCloneApiService', () => {
|
||||
it('should return remote configuration', async () => {
|
||||
const input: GetRCloneRemoteConfigDto = { name: 'test-remote' };
|
||||
const mockConfig = { type: 's3', access_key_id: 'AKIA...' };
|
||||
mockGot.post.mockResolvedValue({ body: mockConfig });
|
||||
mockGotPost.mockResolvedValue({ body: mockConfig });
|
||||
|
||||
const result = await service.getRemoteConfig(input);
|
||||
|
||||
@@ -213,13 +228,13 @@ describe('RCloneApiService', () => {
|
||||
parameters: { access_key_id: 'AKIA...', secret_access_key: 'secret' },
|
||||
};
|
||||
const mockResponse = { success: true };
|
||||
mockGot.post.mockResolvedValue({ body: mockResponse });
|
||||
mockGotPost.mockResolvedValue({ body: mockResponse });
|
||||
|
||||
const result = await service.createRemote(input);
|
||||
|
||||
expect(result).toEqual(mockResponse);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/create',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/config\/create$/),
|
||||
expect.objectContaining({
|
||||
json: {
|
||||
name: 'new-remote',
|
||||
@@ -243,13 +258,13 @@ describe('RCloneApiService', () => {
|
||||
parameters: { access_key_id: 'NEW_AKIA...' },
|
||||
};
|
||||
const mockResponse = { success: true };
|
||||
mockGot.post.mockResolvedValue({ body: mockResponse });
|
||||
mockGotPost.mockResolvedValue({ body: mockResponse });
|
||||
|
||||
const result = await service.updateRemote(input);
|
||||
|
||||
expect(result).toEqual(mockResponse);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/update',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/config\/update$/),
|
||||
expect.objectContaining({
|
||||
json: {
|
||||
name: 'existing-remote',
|
||||
@@ -269,13 +284,13 @@ describe('RCloneApiService', () => {
|
||||
it('should delete a remote', async () => {
|
||||
const input: DeleteRCloneRemoteDto = { name: 'remote-to-delete' };
|
||||
const mockResponse = { success: true };
|
||||
mockGot.post.mockResolvedValue({ body: mockResponse });
|
||||
mockGotPost.mockResolvedValue({ body: mockResponse });
|
||||
|
||||
const result = await service.deleteRemote(input);
|
||||
|
||||
expect(result).toEqual(mockResponse);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/delete',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/config\/delete$/),
|
||||
expect.objectContaining({
|
||||
json: { name: 'remote-to-delete' },
|
||||
responseType: 'json',
|
||||
@@ -296,13 +311,13 @@ describe('RCloneApiService', () => {
|
||||
options: { delete_on: 'dst' },
|
||||
};
|
||||
const mockResponse = { jobid: 'job-123' };
|
||||
mockGot.post.mockResolvedValue({ body: mockResponse });
|
||||
mockGotPost.mockResolvedValue({ body: mockResponse });
|
||||
|
||||
const result = await service.startBackup(input);
|
||||
|
||||
expect(result).toEqual(mockResponse);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/sync/copy',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/sync\/copy$/),
|
||||
expect.objectContaining({
|
||||
json: {
|
||||
srcFs: '/source/path',
|
||||
@@ -323,13 +338,13 @@ describe('RCloneApiService', () => {
|
||||
it('should return job status', async () => {
|
||||
const input: GetRCloneJobStatusDto = { jobId: 'job-123' };
|
||||
const mockStatus = { status: 'running', progress: 0.5 };
|
||||
mockGot.post.mockResolvedValue({ body: mockStatus });
|
||||
mockGotPost.mockResolvedValue({ body: mockStatus });
|
||||
|
||||
const result = await service.getJobStatus(input);
|
||||
|
||||
expect(result).toEqual(mockStatus);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/job/status',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/job\/status$/),
|
||||
expect.objectContaining({
|
||||
json: { jobid: 'job-123' },
|
||||
responseType: 'json',
|
||||
@@ -348,13 +363,13 @@ describe('RCloneApiService', () => {
|
||||
{ id: 'job-1', status: 'running' },
|
||||
{ id: 'job-2', status: 'finished' },
|
||||
];
|
||||
mockGot.post.mockResolvedValue({ body: mockJobs });
|
||||
mockGotPost.mockResolvedValue({ body: mockJobs });
|
||||
|
||||
const result = await service.listRunningJobs();
|
||||
|
||||
expect(result).toEqual(mockJobs);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/job/list',
|
||||
expect(mockGotPost).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/job\/list$/),
|
||||
expect.objectContaining({
|
||||
json: {},
|
||||
responseType: 'json',
|
||||
@@ -378,7 +393,7 @@ describe('RCloneApiService', () => {
|
||||
},
|
||||
};
|
||||
Object.setPrototypeOf(httpError, HTTPError.prototype);
|
||||
mockGot.post.mockRejectedValue(httpError);
|
||||
mockGotPost.mockRejectedValue(httpError);
|
||||
|
||||
await expect(service.getProviders()).rejects.toThrow(
|
||||
'Rclone API Error (config/providers, HTTP 500): Rclone Error: Internal server error'
|
||||
@@ -395,7 +410,7 @@ describe('RCloneApiService', () => {
|
||||
},
|
||||
};
|
||||
Object.setPrototypeOf(httpError, HTTPError.prototype);
|
||||
mockGot.post.mockRejectedValue(httpError);
|
||||
mockGotPost.mockRejectedValue(httpError);
|
||||
|
||||
await expect(service.getProviders()).rejects.toThrow(
|
||||
'Rclone API Error (config/providers, HTTP 404): Failed to process error response body. Raw body:'
|
||||
@@ -412,7 +427,7 @@ describe('RCloneApiService', () => {
|
||||
},
|
||||
};
|
||||
Object.setPrototypeOf(httpError, HTTPError.prototype);
|
||||
mockGot.post.mockRejectedValue(httpError);
|
||||
mockGotPost.mockRejectedValue(httpError);
|
||||
|
||||
await expect(service.getProviders()).rejects.toThrow(
|
||||
'Rclone API Error (config/providers, HTTP 400): Failed to process error response body. Raw body: invalid json'
|
||||
@@ -421,17 +436,108 @@ describe('RCloneApiService', () => {
|
||||
|
||||
it('should handle non-HTTP errors', async () => {
|
||||
const networkError = new Error('Network connection failed');
|
||||
mockGot.post.mockRejectedValue(networkError);
|
||||
mockGotPost.mockRejectedValue(networkError);
|
||||
|
||||
await expect(service.getProviders()).rejects.toThrow('Network connection failed');
|
||||
});
|
||||
|
||||
it('should handle unknown errors', async () => {
|
||||
mockGot.post.mockRejectedValue('unknown error');
|
||||
mockGotPost.mockRejectedValue('unknown error');
|
||||
|
||||
await expect(service.getProviders()).rejects.toThrow(
|
||||
'Unknown error calling RClone API (config/providers) with params {}: unknown error'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('checkRcloneBinaryExists', () => {
|
||||
beforeEach(() => {
|
||||
// Create a new service instance without initializing for these tests
|
||||
service = new RCloneApiService();
|
||||
});
|
||||
|
||||
it('should return true when rclone version is 1.70.0', async () => {
|
||||
mockExeca.mockResolvedValueOnce({
|
||||
stdout: 'rclone v1.70.0\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
|
||||
stderr: '',
|
||||
} as any);
|
||||
|
||||
const result = await (service as any).checkRcloneBinaryExists();
|
||||
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true when rclone version is newer than 1.70.0', async () => {
|
||||
mockExeca.mockResolvedValueOnce({
|
||||
stdout: 'rclone v1.75.2\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
|
||||
stderr: '',
|
||||
} as any);
|
||||
|
||||
const result = await (service as any).checkRcloneBinaryExists();
|
||||
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when rclone version is older than 1.70.0', async () => {
|
||||
mockExeca.mockResolvedValueOnce({
|
||||
stdout: 'rclone v1.69.0\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
|
||||
stderr: '',
|
||||
} as any);
|
||||
|
||||
const result = await (service as any).checkRcloneBinaryExists();
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false when rclone version is much older', async () => {
|
||||
mockExeca.mockResolvedValueOnce({
|
||||
stdout: 'rclone v1.50.0\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
|
||||
stderr: '',
|
||||
} as any);
|
||||
|
||||
const result = await (service as any).checkRcloneBinaryExists();
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false when version cannot be parsed', async () => {
|
||||
mockExeca.mockResolvedValueOnce({
|
||||
stdout: 'rclone unknown version format',
|
||||
stderr: '',
|
||||
} as any);
|
||||
|
||||
const result = await (service as any).checkRcloneBinaryExists();
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false when rclone binary is not found', async () => {
|
||||
const error = new Error('Command not found') as any;
|
||||
error.code = 'ENOENT';
|
||||
mockExeca.mockRejectedValueOnce(error);
|
||||
|
||||
const result = await (service as any).checkRcloneBinaryExists();
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false and log error for other exceptions', async () => {
|
||||
mockExeca.mockRejectedValueOnce(new Error('Some other error'));
|
||||
|
||||
const result = await (service as any).checkRcloneBinaryExists();
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle beta/rc versions correctly', async () => {
|
||||
mockExeca.mockResolvedValueOnce({
|
||||
stdout: 'rclone v1.70.0-beta.1\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
|
||||
stderr: '',
|
||||
} as any);
|
||||
|
||||
const result = await (service as any).checkRcloneBinaryExists();
|
||||
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { expect, test } from 'vitest';
|
||||
import { beforeEach, describe, expect, test, vi } from 'vitest';
|
||||
|
||||
import { parseConfig } from '@app/core/utils/misc/parse-config.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { FileLoadStatus } from '@app/store/types.js';
|
||||
|
||||
@@ -210,6 +211,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": null,
|
||||
"id": "ST18000NM000J-2TV103_ZR585CPY",
|
||||
"idx": 0,
|
||||
"isSpinning": true,
|
||||
"name": "parity",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -234,6 +236,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": 4116003021,
|
||||
"id": "ST18000NM000J-2TV103_ZR5B1W9X",
|
||||
"idx": 1,
|
||||
"isSpinning": true,
|
||||
"name": "disk1",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -258,6 +261,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": 11904860828,
|
||||
"id": "WDC_WD120EDAZ-11F3RA0_5PJRD45C",
|
||||
"idx": 2,
|
||||
"isSpinning": true,
|
||||
"name": "disk2",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -282,6 +286,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": 6478056481,
|
||||
"id": "WDC_WD120EMAZ-11BLFA0_5PH8BTYD",
|
||||
"idx": 3,
|
||||
"isSpinning": true,
|
||||
"name": "disk3",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -306,6 +311,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": 137273827,
|
||||
"id": "Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z",
|
||||
"idx": 30,
|
||||
"isSpinning": true,
|
||||
"name": "cache",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -330,6 +336,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": null,
|
||||
"id": "KINGSTON_SA2000M8250G_50026B7282669D9E",
|
||||
"idx": 31,
|
||||
"isSpinning": true,
|
||||
"name": "cache2",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -354,6 +361,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": 851325,
|
||||
"id": "Cruzer",
|
||||
"idx": 32,
|
||||
"isSpinning": true,
|
||||
"name": "flash",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -446,6 +454,44 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"splitLevel": "1",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cache": false,
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data with periods",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system.with.periods",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system.with.periods",
|
||||
"nameOrig": "system.with.periods",
|
||||
"size": 0,
|
||||
"splitLevel": "1",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cache": false,
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data with 🚀",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system.with.🚀",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system.with.🚀",
|
||||
"nameOrig": "system.with.🚀",
|
||||
"size": 0,
|
||||
"splitLevel": "1",
|
||||
"used": 33619300,
|
||||
},
|
||||
]
|
||||
`);
|
||||
expect(nfsShares).toMatchInlineSnapshot(`
|
||||
@@ -1110,3 +1156,209 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
describe('Share parsing with periods in names', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
test('parseConfig handles periods in INI section names', () => {
|
||||
const mockIniContent = `
|
||||
["share.with.periods"]
|
||||
name=share.with.periods
|
||||
useCache=yes
|
||||
include=
|
||||
exclude=
|
||||
|
||||
[normal_share]
|
||||
name=normal_share
|
||||
useCache=no
|
||||
include=
|
||||
exclude=
|
||||
`;
|
||||
|
||||
const result = parseConfig<any>({
|
||||
file: mockIniContent,
|
||||
type: 'ini',
|
||||
});
|
||||
|
||||
// The result should now have properly flattened keys
|
||||
|
||||
expect(result).toHaveProperty('shareWithPeriods');
|
||||
expect(result).toHaveProperty('normalShare');
|
||||
expect(result.shareWithPeriods.name).toBe('share.with.periods');
|
||||
expect(result.normalShare.name).toBe('normal_share');
|
||||
});
|
||||
|
||||
test('shares parser handles periods in share names correctly', async () => {
|
||||
const { parse } = await import('@app/store/state-parsers/shares.js');
|
||||
|
||||
// The parser expects an object where values are share configs
|
||||
const mockSharesState = {
|
||||
shareWithPeriods: {
|
||||
name: 'share.with.periods',
|
||||
free: '1000000',
|
||||
used: '500000',
|
||||
size: '1500000',
|
||||
include: '',
|
||||
exclude: '',
|
||||
useCache: 'yes',
|
||||
},
|
||||
normalShare: {
|
||||
name: 'normal_share',
|
||||
free: '2000000',
|
||||
used: '750000',
|
||||
size: '2750000',
|
||||
include: '',
|
||||
exclude: '',
|
||||
useCache: 'no',
|
||||
},
|
||||
} as any;
|
||||
|
||||
const result = parse(mockSharesState);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
const periodShare = result.find((s) => s.name === 'share.with.periods');
|
||||
const normalShare = result.find((s) => s.name === 'normal_share');
|
||||
|
||||
expect(periodShare).toBeDefined();
|
||||
expect(periodShare?.id).toBe('share.with.periods');
|
||||
expect(periodShare?.name).toBe('share.with.periods');
|
||||
expect(periodShare?.cache).toBe(true);
|
||||
|
||||
expect(normalShare).toBeDefined();
|
||||
expect(normalShare?.id).toBe('normal_share');
|
||||
expect(normalShare?.name).toBe('normal_share');
|
||||
expect(normalShare?.cache).toBe(false);
|
||||
});
|
||||
|
||||
test('SMB parser handles periods in share names', async () => {
|
||||
const { parse } = await import('@app/store/state-parsers/smb.js');
|
||||
|
||||
const mockSmbState = {
|
||||
'share.with.periods': {
|
||||
export: 'e',
|
||||
security: 'public',
|
||||
writeList: '',
|
||||
readList: '',
|
||||
volsizelimit: '0',
|
||||
},
|
||||
normal_share: {
|
||||
export: 'e',
|
||||
security: 'private',
|
||||
writeList: 'user1,user2',
|
||||
readList: '',
|
||||
volsizelimit: '1000',
|
||||
},
|
||||
} as any;
|
||||
|
||||
const result = parse(mockSmbState);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
const periodShare = result.find((s) => s.name === 'share.with.periods');
|
||||
const normalShare = result.find((s) => s.name === 'normal_share');
|
||||
|
||||
expect(periodShare).toBeDefined();
|
||||
expect(periodShare?.name).toBe('share.with.periods');
|
||||
expect(periodShare?.enabled).toBe(true);
|
||||
|
||||
expect(normalShare).toBeDefined();
|
||||
expect(normalShare?.name).toBe('normal_share');
|
||||
expect(normalShare?.writeList).toEqual(['user1', 'user2']);
|
||||
});
|
||||
|
||||
test('NFS parser handles periods in share names', async () => {
|
||||
const { parse } = await import('@app/store/state-parsers/nfs.js');
|
||||
|
||||
const mockNfsState = {
|
||||
'share.with.periods': {
|
||||
export: 'e',
|
||||
security: 'public',
|
||||
writeList: '',
|
||||
readList: 'user1',
|
||||
hostList: '',
|
||||
},
|
||||
normal_share: {
|
||||
export: 'd',
|
||||
security: 'private',
|
||||
writeList: 'user2',
|
||||
readList: '',
|
||||
hostList: '192.168.1.0/24',
|
||||
},
|
||||
} as any;
|
||||
|
||||
const result = parse(mockNfsState);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
const periodShare = result.find((s) => s.name === 'share.with.periods');
|
||||
const normalShare = result.find((s) => s.name === 'normal_share');
|
||||
|
||||
expect(periodShare).toBeDefined();
|
||||
expect(periodShare?.name).toBe('share.with.periods');
|
||||
expect(periodShare?.enabled).toBe(true);
|
||||
expect(periodShare?.readList).toEqual(['user1']);
|
||||
|
||||
expect(normalShare).toBeDefined();
|
||||
expect(normalShare?.name).toBe('normal_share');
|
||||
expect(normalShare?.enabled).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Share lookup with periods in names', () => {
|
||||
test('getShares finds user shares with periods in names', async () => {
|
||||
// Mock the store state
|
||||
const mockStore = await import('@app/store/index.js');
|
||||
const mockEmhttpState = {
|
||||
shares: [
|
||||
{
|
||||
id: 'share.with.periods',
|
||||
name: 'share.with.periods',
|
||||
cache: true,
|
||||
free: 1000000,
|
||||
used: 500000,
|
||||
size: 1500000,
|
||||
include: [],
|
||||
exclude: [],
|
||||
},
|
||||
{
|
||||
id: 'normal_share',
|
||||
name: 'normal_share',
|
||||
cache: false,
|
||||
free: 2000000,
|
||||
used: 750000,
|
||||
size: 2750000,
|
||||
include: [],
|
||||
exclude: [],
|
||||
},
|
||||
],
|
||||
smbShares: [
|
||||
{ name: 'share.with.periods', enabled: true, security: 'public' },
|
||||
{ name: 'normal_share', enabled: true, security: 'private' },
|
||||
],
|
||||
nfsShares: [
|
||||
{ name: 'share.with.periods', enabled: false },
|
||||
{ name: 'normal_share', enabled: true },
|
||||
],
|
||||
disks: [],
|
||||
};
|
||||
|
||||
const gettersSpy = vi.spyOn(mockStore, 'getters', 'get').mockReturnValue({
|
||||
emhttp: () => mockEmhttpState,
|
||||
} as any);
|
||||
|
||||
const { getShares } = await import('@app/core/utils/shares/get-shares.js');
|
||||
|
||||
const periodShare = getShares('user', { name: 'share.with.periods' });
|
||||
const normalShare = getShares('user', { name: 'normal_share' });
|
||||
|
||||
expect(periodShare).not.toBeNull();
|
||||
expect(periodShare?.name).toBe('share.with.periods');
|
||||
expect(periodShare?.type).toBe('user');
|
||||
|
||||
expect(normalShare).not.toBeNull();
|
||||
expect(normalShare?.name).toBe('normal_share');
|
||||
expect(normalShare?.type).toBe('user');
|
||||
|
||||
gettersSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -92,6 +92,44 @@ test('Returns parsed state file', async () => {
|
||||
"splitLevel": "1",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cache": false,
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data with periods",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system.with.periods",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system.with.periods",
|
||||
"nameOrig": "system.with.periods",
|
||||
"size": 0,
|
||||
"splitLevel": "1",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cache": false,
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data with 🚀",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system.with.🚀",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system.with.🚀",
|
||||
"nameOrig": "system.with.🚀",
|
||||
"size": 0,
|
||||
"splitLevel": "1",
|
||||
"used": 33619300,
|
||||
},
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
@@ -28,6 +28,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": null,
|
||||
"id": "ST18000NM000J-2TV103_ZR585CPY",
|
||||
"idx": 0,
|
||||
"isSpinning": true,
|
||||
"name": "parity",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -52,6 +53,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": 4116003021,
|
||||
"id": "ST18000NM000J-2TV103_ZR5B1W9X",
|
||||
"idx": 1,
|
||||
"isSpinning": true,
|
||||
"name": "disk1",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -76,6 +78,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": 11904860828,
|
||||
"id": "WDC_WD120EDAZ-11F3RA0_5PJRD45C",
|
||||
"idx": 2,
|
||||
"isSpinning": true,
|
||||
"name": "disk2",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -100,6 +103,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": 6478056481,
|
||||
"id": "WDC_WD120EMAZ-11BLFA0_5PH8BTYD",
|
||||
"idx": 3,
|
||||
"isSpinning": true,
|
||||
"name": "disk3",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -124,6 +128,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": 137273827,
|
||||
"id": "Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z",
|
||||
"idx": 30,
|
||||
"isSpinning": true,
|
||||
"name": "cache",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -148,6 +153,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": null,
|
||||
"id": "KINGSTON_SA2000M8250G_50026B7282669D9E",
|
||||
"idx": 31,
|
||||
"isSpinning": true,
|
||||
"name": "cache2",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -172,6 +178,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": 851325,
|
||||
"id": "Cruzer",
|
||||
"idx": 32,
|
||||
"isSpinning": true,
|
||||
"name": "flash",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
|
||||
@@ -2,7 +2,7 @@ import { join } from 'path';
|
||||
|
||||
import type { JSONWebKeySet } from 'jose';
|
||||
|
||||
import { PORT } from '@app/environment.js';
|
||||
import { ENABLE_NEXT_DOCKER_RELEASE, PORT } from '@app/environment.js';
|
||||
|
||||
export const getInternalApiAddress = (isHttp = true, nginxPort = 80) => {
|
||||
const envPort = PORT;
|
||||
@@ -79,3 +79,14 @@ export const KEYSERVER_VALIDATION_ENDPOINT = 'https://keys.lime-technology.com/v
|
||||
|
||||
/** Set the max retries for the GraphQL Client */
|
||||
export const MAX_RETRIES_FOR_LINEAR_BACKOFF = 100;
|
||||
|
||||
/**
|
||||
* Feature flags are used to conditionally enable or disable functionality in the Unraid API.
|
||||
*
|
||||
* Keys are human readable feature flag names -- will be used to construct error messages.
|
||||
*
|
||||
* Values are boolean/truthy values.
|
||||
*/
|
||||
export const FeatureFlags = Object.freeze({
|
||||
ENABLE_NEXT_DOCKER_RELEASE,
|
||||
});
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import pino from 'pino';
|
||||
import pretty from 'pino-pretty';
|
||||
|
||||
import { API_VERSION, LOG_LEVEL, LOG_TYPE, PATHS_LOGS_FILE, SUPPRESS_LOGS } from '@app/environment.js';
|
||||
import { API_VERSION, LOG_LEVEL, LOG_TYPE, SUPPRESS_LOGS } from '@app/environment.js';
|
||||
|
||||
export const levels = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'] as const;
|
||||
|
||||
@@ -17,20 +17,33 @@ const nullDestination = pino.destination({
|
||||
|
||||
export const logDestination =
|
||||
process.env.SUPPRESS_LOGS === 'true' ? nullDestination : pino.destination();
|
||||
const localFileDestination = pino.destination({
|
||||
dest: PATHS_LOGS_FILE,
|
||||
sync: true,
|
||||
});
|
||||
|
||||
// Since PM2 captures stdout and writes to the log file, we should not colorize stdout
|
||||
// to avoid ANSI escape codes in the log file
|
||||
const stream = SUPPRESS_LOGS
|
||||
? nullDestination
|
||||
: LOG_TYPE === 'pretty'
|
||||
? pretty({
|
||||
singleLine: true,
|
||||
hideObject: false,
|
||||
colorize: true,
|
||||
colorize: false, // No colors since PM2 writes stdout to file
|
||||
colorizeObjects: false,
|
||||
levelFirst: false,
|
||||
ignore: 'hostname,pid',
|
||||
destination: logDestination,
|
||||
translateTime: 'HH:mm:ss',
|
||||
customPrettifiers: {
|
||||
time: (timestamp: string | object) => `[${timestamp}`,
|
||||
level: (_logLevel: string | object, _key: string, log: any, extras: any) => {
|
||||
// Use label instead of labelColorized for non-colored output
|
||||
const { label } = extras;
|
||||
const context = log.context || log.logger || 'app';
|
||||
return `${label} ${context}]`;
|
||||
},
|
||||
},
|
||||
messageFormat: (log: any, messageKey: string) => {
|
||||
const msg = log[messageKey] || log.msg || '';
|
||||
return msg;
|
||||
},
|
||||
})
|
||||
: logDestination;
|
||||
|
||||
@@ -82,7 +95,7 @@ export const keyServerLogger = logger.child({ logger: 'key-server' });
|
||||
export const remoteAccessLogger = logger.child({ logger: 'remote-access' });
|
||||
export const remoteQueryLogger = logger.child({ logger: 'remote-query' });
|
||||
export const apiLogger = logger.child({ logger: 'api' });
|
||||
export const pluginLogger = logger.child({ logger: 'plugin', stream: localFileDestination });
|
||||
export const pluginLogger = logger.child({ logger: 'plugin' });
|
||||
|
||||
export const loggers = [
|
||||
internalLogger,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { GraphQLError } from 'graphql';
|
||||
import { sum } from 'lodash-es';
|
||||
|
||||
import { getParityCheckStatus } from '@app/core/modules/array/parity-check-status.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { FileLoadStatus } from '@app/store/types.js';
|
||||
import {
|
||||
@@ -61,5 +62,6 @@ export const getArrayData = (getState = store.getState): UnraidArray => {
|
||||
parities,
|
||||
disks,
|
||||
caches,
|
||||
parityCheckStatus: getParityCheckStatus(emhttp.var),
|
||||
};
|
||||
};
|
||||
|
||||
1080
api/src/core/modules/array/parity-check-status.test.ts
Normal file
1080
api/src/core/modules/array/parity-check-status.test.ts
Normal file
File diff suppressed because it is too large
Load Diff
72
api/src/core/modules/array/parity-check-status.ts
Normal file
72
api/src/core/modules/array/parity-check-status.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
import { toNumberAlways } from '@unraid/shared/util/data.js';
|
||||
|
||||
import type { Var } from '@app/core/types/states/var.js';
|
||||
import type { ParityCheck } from '@app/unraid-api/graph/resolvers/array/parity.model.js';
|
||||
|
||||
export enum ParityCheckStatus {
|
||||
NEVER_RUN = 'never_run',
|
||||
RUNNING = 'running',
|
||||
PAUSED = 'paused',
|
||||
COMPLETED = 'completed',
|
||||
CANCELLED = 'cancelled',
|
||||
FAILED = 'failed',
|
||||
}
|
||||
|
||||
function calculateParitySpeed(deltaTime: number, deltaBlocks: number) {
|
||||
if (deltaTime === 0 || deltaBlocks === 0) return 0;
|
||||
const deltaBytes = deltaBlocks * 1024;
|
||||
const speedMBps = deltaBytes / deltaTime / 1024 / 1024;
|
||||
return Math.round(speedMBps);
|
||||
}
|
||||
|
||||
type RelevantVarData = Pick<
|
||||
Var,
|
||||
| 'mdResyncPos'
|
||||
| 'mdResyncDt'
|
||||
| 'sbSyncExit'
|
||||
| 'sbSynced'
|
||||
| 'sbSynced2'
|
||||
| 'mdResyncDb'
|
||||
| 'mdResyncSize'
|
||||
>;
|
||||
|
||||
function getStatusFromVarData(varData: RelevantVarData): ParityCheckStatus {
|
||||
const { mdResyncPos, mdResyncDt, sbSyncExit, sbSynced, sbSynced2 } = varData;
|
||||
const mdResyncDtNumber = toNumberAlways(mdResyncDt, 0);
|
||||
const sbSyncExitNumber = toNumberAlways(sbSyncExit, 0);
|
||||
|
||||
switch (true) {
|
||||
case mdResyncPos > 0:
|
||||
return mdResyncDtNumber > 0 ? ParityCheckStatus.RUNNING : ParityCheckStatus.PAUSED;
|
||||
case sbSynced === 0:
|
||||
return ParityCheckStatus.NEVER_RUN;
|
||||
case sbSyncExitNumber === -4:
|
||||
return ParityCheckStatus.CANCELLED;
|
||||
case sbSyncExitNumber !== 0:
|
||||
return ParityCheckStatus.FAILED;
|
||||
case sbSynced2 > 0:
|
||||
return ParityCheckStatus.COMPLETED;
|
||||
default:
|
||||
return ParityCheckStatus.NEVER_RUN;
|
||||
}
|
||||
}
|
||||
|
||||
export function getParityCheckStatus(varData: RelevantVarData): ParityCheck {
|
||||
const { sbSynced, sbSynced2, mdResyncDt, mdResyncDb, mdResyncPos, mdResyncSize } = varData;
|
||||
const deltaTime = toNumberAlways(mdResyncDt, 0);
|
||||
const deltaBlocks = toNumberAlways(mdResyncDb, 0);
|
||||
|
||||
// seconds since epoch (unix timestamp)
|
||||
const now = sbSynced2 > 0 ? sbSynced2 : Date.now() / 1000;
|
||||
return {
|
||||
status: getStatusFromVarData(varData),
|
||||
speed: String(calculateParitySpeed(deltaTime, deltaBlocks)),
|
||||
date: sbSynced > 0 ? new Date(sbSynced * 1000) : undefined,
|
||||
duration: sbSynced > 0 ? Math.round(now - sbSynced) : undefined,
|
||||
// percentage as integer, clamped to [0, 100]
|
||||
progress:
|
||||
mdResyncSize <= 0
|
||||
? 0
|
||||
: Math.round(Math.min(100, Math.max(0, (mdResyncPos / mdResyncSize) * 100))),
|
||||
};
|
||||
}
|
||||
@@ -13,10 +13,11 @@ export const pubsub = new PubSub({ eventEmitter });
|
||||
|
||||
/**
|
||||
* Create a pubsub subscription.
|
||||
* @param channel The pubsub channel to subscribe to.
|
||||
* @param channel The pubsub channel to subscribe to. Can be either a predefined GRAPHQL_PUBSUB_CHANNEL
|
||||
* or a dynamic string for runtime-generated topics (e.g., log file paths like "LOG_FILE:/var/log/test.log")
|
||||
*/
|
||||
export const createSubscription = <T = any>(
|
||||
channel: GRAPHQL_PUBSUB_CHANNEL
|
||||
channel: GRAPHQL_PUBSUB_CHANNEL | string
|
||||
): AsyncIterableIterator<T> => {
|
||||
return pubsub.asyncIterableIterator<T>(channel);
|
||||
};
|
||||
|
||||
@@ -68,11 +68,24 @@ export type Var = {
|
||||
mdNumStripes: number;
|
||||
mdNumStripesDefault: number;
|
||||
mdNumStripesStatus: string;
|
||||
/**
|
||||
* Serves a dual purpose depending on context:
|
||||
* - Total size of the operation (in sectors/blocks)
|
||||
* - Running state indicator (0 = paused, >0 = running)
|
||||
*/
|
||||
mdResync: number;
|
||||
mdResyncAction: string;
|
||||
mdResyncCorr: string;
|
||||
mdResyncDb: string;
|
||||
/** Average time interval (delta time) in seconds of current parity operations */
|
||||
mdResyncDt: string;
|
||||
/**
|
||||
* Current position in the parity operation (in sectors/blocks).
|
||||
* When mdResyncPos > 0, a parity operation is active.
|
||||
* When mdResyncPos = 0, no parity operation is running.
|
||||
*
|
||||
* Used to calculate progress percentage.
|
||||
*/
|
||||
mdResyncPos: number;
|
||||
mdResyncSize: number;
|
||||
mdState: ArrayState;
|
||||
@@ -136,9 +149,36 @@ export type Var = {
|
||||
sbName: string;
|
||||
sbNumDisks: number;
|
||||
sbState: string;
|
||||
/**
|
||||
* Unix timestamp when parity operation started.
|
||||
* When sbSynced = 0, indicates no parity check has ever been run.
|
||||
*
|
||||
* Used to calculate elapsed time during active operations.
|
||||
*/
|
||||
sbSynced: number;
|
||||
sbSynced2: number;
|
||||
/**
|
||||
* Unix timestamp when parity operation completed (successfully or with errors).
|
||||
* Used to display completion time in status messages.
|
||||
*
|
||||
* When sbSynced2 = 0, indicates operation started but not yet finished
|
||||
*/
|
||||
sbSyncErrs: number;
|
||||
/**
|
||||
* Exit status code that indicates how the last parity operation completed, following standard Unix conventions.
|
||||
*
|
||||
* sbSyncExit = 0 - Successful Completion
|
||||
* - Parity operation completed normally without errors
|
||||
* - Used to calculate speed and display success message
|
||||
*
|
||||
* sbSyncExit = -4 - Aborted/Cancelled
|
||||
* - Operation was manually cancelled by user
|
||||
* - Displays as "aborted" in the UI
|
||||
*
|
||||
* sbSyncExit ≠ 0 (other values) - Failed/Incomplete
|
||||
* - Operation failed due to errors or other issues
|
||||
* - Displays the numeric error code
|
||||
*/
|
||||
sbSyncExit: string;
|
||||
sbUpdated: string;
|
||||
sbVersion: string;
|
||||
|
||||
@@ -16,11 +16,22 @@ export const getKeyFile = async function (appStore: RootState = store.getState()
|
||||
|
||||
const keyFileName = basename(emhttp.var?.regFile);
|
||||
const registrationKeyFilePath = join(paths['keyfile-base'], keyFileName);
|
||||
const keyFile = await readFile(registrationKeyFilePath, 'binary');
|
||||
return Buffer.from(keyFile, 'binary')
|
||||
.toString('base64')
|
||||
.trim()
|
||||
.replace(/\+/g, '-')
|
||||
.replace(/\//g, '_')
|
||||
.replace(/=/g, '');
|
||||
|
||||
try {
|
||||
const keyFile = await readFile(registrationKeyFilePath, 'binary');
|
||||
return Buffer.from(keyFile, 'binary')
|
||||
.toString('base64')
|
||||
.trim()
|
||||
.replace(/\+/g, '-')
|
||||
.replace(/\//g, '_')
|
||||
.replace(/=/g, '');
|
||||
} catch (error) {
|
||||
// Handle ENOENT error when Pro.key file doesn't exist
|
||||
if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
|
||||
// Return empty string when key file is missing (ENOKEYFILE state)
|
||||
return '';
|
||||
}
|
||||
// Re-throw other errors
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -23,6 +23,54 @@ type OptionsWithLoadedFile = {
|
||||
type: ConfigType;
|
||||
};
|
||||
|
||||
/**
|
||||
* Flattens nested objects that were incorrectly created by periods in INI section names.
|
||||
* For example: { system: { with: { periods: {...} } } } -> { "system.with.periods": {...} }
|
||||
*/
|
||||
const flattenPeriodSections = (obj: Record<string, any>, prefix = ''): Record<string, any> => {
|
||||
const result: Record<string, any> = {};
|
||||
const isNestedObject = (value: unknown) =>
|
||||
Boolean(value && typeof value === 'object' && !Array.isArray(value));
|
||||
// prevent prototype pollution/injection
|
||||
const isUnsafeKey = (k: string) => k === '__proto__' || k === 'prototype' || k === 'constructor';
|
||||
|
||||
for (const [key, value] of Object.entries(obj)) {
|
||||
if (isUnsafeKey(key)) continue;
|
||||
const fullKey = prefix ? `${prefix}.${key}` : key;
|
||||
|
||||
if (!isNestedObject(value)) {
|
||||
result[fullKey] = value;
|
||||
continue;
|
||||
}
|
||||
|
||||
const section = {};
|
||||
const nestedObjs = {};
|
||||
let hasSectionProps = false;
|
||||
|
||||
for (const [propKey, propValue] of Object.entries(value)) {
|
||||
if (isUnsafeKey(propKey)) continue;
|
||||
if (isNestedObject(propValue)) {
|
||||
nestedObjs[propKey] = propValue;
|
||||
} else {
|
||||
section[propKey] = propValue;
|
||||
hasSectionProps = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Process direct properties first to maintain order
|
||||
if (hasSectionProps) {
|
||||
result[fullKey] = section;
|
||||
}
|
||||
|
||||
// Then process nested objects
|
||||
if (Object.keys(nestedObjs).length > 0) {
|
||||
Object.assign(result, flattenPeriodSections(nestedObjs, fullKey));
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts the following
|
||||
* ```
|
||||
@@ -127,6 +175,8 @@ export const parseConfig = <T extends Record<string, any>>(
|
||||
let data: Record<string, any>;
|
||||
try {
|
||||
data = parseIni(fileContents);
|
||||
// Fix nested objects created by periods in section names
|
||||
data = flattenPeriodSections(data);
|
||||
} catch (error) {
|
||||
throw new AppError(
|
||||
`Failed to parse config file: ${error instanceof Error ? error.message : String(error)}`
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
// Non-function exports from this module are loaded into the NestJS Config at runtime.
|
||||
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { homedir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
@@ -99,7 +98,7 @@ export const MOTHERSHIP_GRAPHQL_LINK = process.env.MOTHERSHIP_GRAPHQL_LINK
|
||||
? 'https://staging.mothership.unraid.net/ws'
|
||||
: 'https://mothership.unraid.net/ws';
|
||||
|
||||
export const PM2_HOME = process.env.PM2_HOME ?? join(homedir(), '.pm2');
|
||||
export const PM2_HOME = process.env.PM2_HOME ?? '/var/log/.pm2';
|
||||
export const PM2_PATH = join(import.meta.dirname, '../../', 'node_modules', 'pm2', 'bin', 'pm2');
|
||||
export const ECOSYSTEM_PATH = join(import.meta.dirname, '../../', 'ecosystem.config.json');
|
||||
export const PATHS_LOGS_DIR =
|
||||
@@ -108,3 +107,9 @@ export const PATHS_LOGS_FILE = process.env.PATHS_LOGS_FILE ?? '/var/log/graphql-
|
||||
|
||||
export const PATHS_CONFIG_MODULES =
|
||||
process.env.PATHS_CONFIG_MODULES ?? '/boot/config/plugins/dynamix.my.servers/configs';
|
||||
|
||||
export const PATHS_LOCAL_SESSION_FILE =
|
||||
process.env.PATHS_LOCAL_SESSION_FILE ?? '/var/run/unraid-api/local-session';
|
||||
|
||||
/** feature flag for the upcoming docker release */
|
||||
export const ENABLE_NEXT_DOCKER_RELEASE = process.env.ENABLE_NEXT_DOCKER_RELEASE === 'true';
|
||||
|
||||
@@ -36,6 +36,7 @@ export type IniSlot = {
|
||||
size: string;
|
||||
sizeSb: string;
|
||||
slots: string;
|
||||
spundown: string;
|
||||
status: SlotStatus;
|
||||
temp: string;
|
||||
type: SlotType;
|
||||
@@ -82,6 +83,7 @@ export const parse: StateFileToIniParserMap['disks'] = (disksIni) =>
|
||||
fsType: slot.fsType ?? null,
|
||||
format: slot.format === '-' ? null : slot.format,
|
||||
transport: slot.transport ?? null,
|
||||
isSpinning: slot.spundown ? slot.spundown === '0' : null,
|
||||
};
|
||||
// @TODO Zod Parse This
|
||||
return result;
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { CacheModule } from '@nestjs/cache-manager';
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import { describe, expect, it } from 'vitest';
|
||||
@@ -9,7 +10,7 @@ describe('Module Dependencies Integration', () => {
|
||||
let module;
|
||||
try {
|
||||
module = await Test.createTestingModule({
|
||||
imports: [RestModule],
|
||||
imports: [CacheModule.register({ isGlobal: true }), RestModule],
|
||||
}).compile();
|
||||
|
||||
expect(module).toBeDefined();
|
||||
|
||||
@@ -14,6 +14,7 @@ import { AuthModule } from '@app/unraid-api/auth/auth.module.js';
|
||||
import { AuthenticationGuard } from '@app/unraid-api/auth/authentication.guard.js';
|
||||
import { LegacyConfigModule } from '@app/unraid-api/config/legacy-config.module.js';
|
||||
import { CronModule } from '@app/unraid-api/cron/cron.module.js';
|
||||
import { JobModule } from '@app/unraid-api/cron/job.module.js';
|
||||
import { GraphModule } from '@app/unraid-api/graph/graph.module.js';
|
||||
import { GlobalDepsModule } from '@app/unraid-api/plugin/global-deps.module.js';
|
||||
import { RestModule } from '@app/unraid-api/rest/rest.module.js';
|
||||
@@ -24,7 +25,7 @@ import { UnraidFileModifierModule } from '@app/unraid-api/unraid-file-modifier/u
|
||||
GlobalDepsModule,
|
||||
LegacyConfigModule,
|
||||
PubSubModule,
|
||||
ScheduleModule.forRoot(),
|
||||
JobModule,
|
||||
LoggerModule.forRoot({
|
||||
pinoHttp: {
|
||||
logger: apiLogger,
|
||||
@@ -34,6 +35,15 @@ import { UnraidFileModifierModule } from '@app/unraid-api/unraid-file-modifier/u
|
||||
req: () => undefined,
|
||||
res: () => undefined,
|
||||
},
|
||||
formatters: {
|
||||
log: (obj) => {
|
||||
// Map NestJS context to Pino context field for pino-pretty
|
||||
if (obj.context && !obj.logger) {
|
||||
return { ...obj, logger: obj.context };
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
AuthModule,
|
||||
|
||||
@@ -2,15 +2,14 @@ import { Logger } from '@nestjs/common';
|
||||
import { readdir, readFile, writeFile } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
|
||||
import { Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { ensureDir, ensureDirSync } from 'fs-extra';
|
||||
import { AuthActionVerb } from 'nest-authz';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { environment } from '@app/environment.js';
|
||||
import { getters } from '@app/store/index.js';
|
||||
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
|
||||
import { ApiKey, ApiKeyWithSecret } from '@app/unraid-api/graph/resolvers/api-key/api-key.model.js';
|
||||
import { ApiKey } from '@app/unraid-api/graph/resolvers/api-key/api-key.model.js';
|
||||
|
||||
// Mock the store and its modules
|
||||
vi.mock('@app/store/index.js', () => ({
|
||||
@@ -48,28 +47,14 @@ describe('ApiKeyService', () => {
|
||||
|
||||
const mockApiKey: ApiKey = {
|
||||
id: 'test-api-id',
|
||||
key: 'test-secret-key',
|
||||
name: 'Test API Key',
|
||||
description: 'Test API Key Description',
|
||||
roles: [Role.GUEST],
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthActionVerb.READ],
|
||||
},
|
||||
],
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
const mockApiKeyWithSecret: ApiKeyWithSecret = {
|
||||
id: 'test-api-id',
|
||||
key: 'test-api-key',
|
||||
name: 'Test API Key',
|
||||
description: 'Test API Key Description',
|
||||
roles: [Role.GUEST],
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthActionVerb.READ],
|
||||
actions: [AuthAction.READ_ANY],
|
||||
},
|
||||
],
|
||||
createdAt: new Date().toISOString(),
|
||||
@@ -130,21 +115,23 @@ describe('ApiKeyService', () => {
|
||||
});
|
||||
|
||||
describe('create', () => {
|
||||
it('should create ApiKeyWithSecret with generated key', async () => {
|
||||
it('should create ApiKey with generated key', async () => {
|
||||
const saveSpy = vi.spyOn(apiKeyService, 'saveApiKey').mockResolvedValue();
|
||||
const { key, id, description, roles } = mockApiKeyWithSecret;
|
||||
const { id, description, roles } = mockApiKey;
|
||||
const name = 'Test API Key';
|
||||
|
||||
const result = await apiKeyService.create({ name, description: description ?? '', roles });
|
||||
|
||||
expect(result).toMatchObject({
|
||||
id,
|
||||
key,
|
||||
name: name,
|
||||
description,
|
||||
roles,
|
||||
createdAt: expect.any(String),
|
||||
});
|
||||
expect(result.key).toBeDefined();
|
||||
expect(typeof result.key).toBe('string');
|
||||
expect(result.key.length).toBeGreaterThan(0);
|
||||
|
||||
expect(saveSpy).toHaveBeenCalledWith(result);
|
||||
});
|
||||
@@ -177,8 +164,8 @@ describe('ApiKeyService', () => {
|
||||
describe('findAll', () => {
|
||||
it('should return all API keys', async () => {
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([
|
||||
mockApiKeyWithSecret,
|
||||
{ ...mockApiKeyWithSecret, id: 'second-id' },
|
||||
mockApiKey,
|
||||
{ ...mockApiKey, id: 'second-id' },
|
||||
]);
|
||||
await apiKeyService.onModuleInit();
|
||||
|
||||
@@ -191,7 +178,7 @@ describe('ApiKeyService', () => {
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthActionVerb.READ],
|
||||
actions: [AuthAction.READ_ANY],
|
||||
},
|
||||
],
|
||||
};
|
||||
@@ -202,7 +189,7 @@ describe('ApiKeyService', () => {
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthActionVerb.READ],
|
||||
actions: [AuthAction.READ_ANY],
|
||||
},
|
||||
],
|
||||
};
|
||||
@@ -219,17 +206,17 @@ describe('ApiKeyService', () => {
|
||||
|
||||
describe('findById', () => {
|
||||
it('should return API key by id when found', async () => {
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([mockApiKeyWithSecret]);
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([mockApiKey]);
|
||||
await apiKeyService.onModuleInit();
|
||||
|
||||
const result = await apiKeyService.findById(mockApiKeyWithSecret.id);
|
||||
const result = await apiKeyService.findById(mockApiKey.id);
|
||||
|
||||
expect(result).toMatchObject({ ...mockApiKey, createdAt: expect.any(String) });
|
||||
});
|
||||
|
||||
it('should return null if API key not found', async () => {
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([
|
||||
{ ...mockApiKeyWithSecret, id: 'different-id' },
|
||||
{ ...mockApiKey, id: 'different-id' },
|
||||
]);
|
||||
await apiKeyService.onModuleInit();
|
||||
|
||||
@@ -239,21 +226,21 @@ describe('ApiKeyService', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('findByIdWithSecret', () => {
|
||||
it('should return API key with secret when found', async () => {
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([mockApiKeyWithSecret]);
|
||||
describe('findById', () => {
|
||||
it('should return API key when found', async () => {
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([mockApiKey]);
|
||||
await apiKeyService.onModuleInit();
|
||||
|
||||
const result = await apiKeyService.findByIdWithSecret(mockApiKeyWithSecret.id);
|
||||
const result = await apiKeyService.findById(mockApiKey.id);
|
||||
|
||||
expect(result).toEqual(mockApiKeyWithSecret);
|
||||
expect(result).toEqual(mockApiKey);
|
||||
});
|
||||
|
||||
it('should return null when API key not found', async () => {
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([]);
|
||||
await apiKeyService.onModuleInit();
|
||||
|
||||
const result = await apiKeyService.findByIdWithSecret('non-existent-id');
|
||||
const result = await apiKeyService.findById('non-existent-id');
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
@@ -274,23 +261,20 @@ describe('ApiKeyService', () => {
|
||||
|
||||
describe('findByKey', () => {
|
||||
it('should return API key by key value when multiple keys exist', async () => {
|
||||
const differentKey = { ...mockApiKeyWithSecret, key: 'different-key' };
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([
|
||||
differentKey,
|
||||
mockApiKeyWithSecret,
|
||||
]);
|
||||
const differentKey = { ...mockApiKey, key: 'different-key' };
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([differentKey, mockApiKey]);
|
||||
|
||||
await apiKeyService.onModuleInit();
|
||||
|
||||
const result = await apiKeyService.findByKey(mockApiKeyWithSecret.key);
|
||||
const result = await apiKeyService.findByKey(mockApiKey.key);
|
||||
|
||||
expect(result).toEqual(mockApiKeyWithSecret);
|
||||
expect(result).toEqual(mockApiKey);
|
||||
});
|
||||
|
||||
it('should return null if key not found in any file', async () => {
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([
|
||||
{ ...mockApiKeyWithSecret, key: 'different-key-1' },
|
||||
{ ...mockApiKeyWithSecret, key: 'different-key-2' },
|
||||
{ ...mockApiKey, key: 'different-key-1' },
|
||||
{ ...mockApiKey, key: 'different-key-2' },
|
||||
]);
|
||||
await apiKeyService.onModuleInit();
|
||||
|
||||
@@ -314,21 +298,21 @@ describe('ApiKeyService', () => {
|
||||
it('should save API key to file', async () => {
|
||||
vi.mocked(writeFile).mockResolvedValue(undefined);
|
||||
|
||||
await apiKeyService.saveApiKey(mockApiKeyWithSecret);
|
||||
await apiKeyService.saveApiKey(mockApiKey);
|
||||
|
||||
const writeFileCalls = vi.mocked(writeFile).mock.calls;
|
||||
|
||||
expect(writeFileCalls.length).toBe(1);
|
||||
|
||||
const [filePath, fileContent] = writeFileCalls[0] ?? [];
|
||||
const expectedPath = join(mockBasePath, `${mockApiKeyWithSecret.id}.json`);
|
||||
const expectedPath = join(mockBasePath, `${mockApiKey.id}.json`);
|
||||
|
||||
expect(filePath).toBe(expectedPath);
|
||||
|
||||
if (typeof fileContent === 'string') {
|
||||
const savedApiKey = JSON.parse(fileContent);
|
||||
|
||||
expect(savedApiKey).toEqual(mockApiKeyWithSecret);
|
||||
expect(savedApiKey).toEqual(mockApiKey);
|
||||
} else {
|
||||
throw new Error('File content should be a string');
|
||||
}
|
||||
@@ -337,16 +321,16 @@ describe('ApiKeyService', () => {
|
||||
it('should throw GraphQLError on write error', async () => {
|
||||
vi.mocked(writeFile).mockRejectedValue(new Error('Write failed'));
|
||||
|
||||
await expect(apiKeyService.saveApiKey(mockApiKeyWithSecret)).rejects.toThrow(
|
||||
await expect(apiKeyService.saveApiKey(mockApiKey)).rejects.toThrow(
|
||||
'Failed to save API key: Write failed'
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw GraphQLError on invalid API key structure', async () => {
|
||||
const invalidApiKey = {
|
||||
...mockApiKeyWithSecret,
|
||||
...mockApiKey,
|
||||
name: '', // Invalid: name cannot be empty
|
||||
} as ApiKeyWithSecret;
|
||||
} as ApiKey;
|
||||
|
||||
await expect(apiKeyService.saveApiKey(invalidApiKey)).rejects.toThrow(
|
||||
'Failed to save API key: Invalid data structure'
|
||||
@@ -355,10 +339,10 @@ describe('ApiKeyService', () => {
|
||||
|
||||
it('should throw GraphQLError when roles and permissions array is empty', async () => {
|
||||
const invalidApiKey = {
|
||||
...mockApiKeyWithSecret,
|
||||
...mockApiKey,
|
||||
permissions: [],
|
||||
roles: [],
|
||||
} as ApiKeyWithSecret;
|
||||
} as ApiKey;
|
||||
|
||||
await expect(apiKeyService.saveApiKey(invalidApiKey)).rejects.toThrow(
|
||||
'At least one of permissions or roles must be specified'
|
||||
@@ -367,9 +351,9 @@ describe('ApiKeyService', () => {
|
||||
});
|
||||
|
||||
describe('update', () => {
|
||||
let updateMockApiKey: ApiKeyWithSecret;
|
||||
let updateMockApiKey: ApiKey;
|
||||
|
||||
beforeEach(() => {
|
||||
beforeEach(async () => {
|
||||
// Create a fresh copy of the mock data for update tests
|
||||
updateMockApiKey = {
|
||||
id: 'test-api-id',
|
||||
@@ -380,15 +364,17 @@ describe('ApiKeyService', () => {
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthActionVerb.READ],
|
||||
actions: [AuthAction.READ_ANY],
|
||||
},
|
||||
],
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([updateMockApiKey]);
|
||||
// Initialize the memoryApiKeys with the test data
|
||||
// The loadAllFromDisk mock will be called by onModuleInit
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([{ ...updateMockApiKey }]);
|
||||
vi.spyOn(apiKeyService, 'saveApiKey').mockResolvedValue();
|
||||
apiKeyService.onModuleInit();
|
||||
await apiKeyService.onModuleInit();
|
||||
});
|
||||
|
||||
it('should update name and description', async () => {
|
||||
@@ -400,7 +386,6 @@ describe('ApiKeyService', () => {
|
||||
name: updatedName,
|
||||
description: updatedDescription,
|
||||
});
|
||||
|
||||
expect(result.name).toBe(updatedName);
|
||||
expect(result.description).toBe(updatedDescription);
|
||||
expect(result.roles).toEqual(updateMockApiKey.roles);
|
||||
@@ -427,7 +412,7 @@ describe('ApiKeyService', () => {
|
||||
const updatedPermissions = [
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthActionVerb.READ, AuthActionVerb.UPDATE],
|
||||
actions: [AuthAction.READ_ANY, AuthAction.UPDATE_ANY],
|
||||
},
|
||||
];
|
||||
|
||||
@@ -474,7 +459,7 @@ describe('ApiKeyService', () => {
|
||||
});
|
||||
|
||||
describe('loadAllFromDisk', () => {
|
||||
let loadMockApiKey: ApiKeyWithSecret;
|
||||
let loadMockApiKey: ApiKey;
|
||||
|
||||
beforeEach(() => {
|
||||
// Create a fresh copy of the mock data for loadAllFromDisk tests
|
||||
@@ -487,7 +472,7 @@ describe('ApiKeyService', () => {
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthActionVerb.READ],
|
||||
actions: [AuthAction.READ_ANY],
|
||||
},
|
||||
],
|
||||
createdAt: new Date().toISOString(),
|
||||
@@ -550,15 +535,62 @@ describe('ApiKeyService', () => {
|
||||
key: 'unique-key',
|
||||
});
|
||||
});
|
||||
|
||||
it('should normalize permission actions to lowercase when loading from disk', async () => {
|
||||
const apiKeyWithMixedCaseActions = {
|
||||
...loadMockApiKey,
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.DOCKER,
|
||||
actions: ['READ:ANY', 'Update:Any', 'create:any', 'DELETE:ANY'], // Mixed case actions
|
||||
},
|
||||
{
|
||||
resource: Resource.ARRAY,
|
||||
actions: ['Read:Any'], // Mixed case
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
vi.mocked(readdir).mockResolvedValue(['key1.json'] as any);
|
||||
vi.mocked(readFile).mockResolvedValueOnce(JSON.stringify(apiKeyWithMixedCaseActions));
|
||||
|
||||
const result = await apiKeyService.loadAllFromDisk();
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
// All actions should be normalized to lowercase
|
||||
expect(result[0].permissions[0].actions).toEqual([
|
||||
AuthAction.READ_ANY,
|
||||
AuthAction.UPDATE_ANY,
|
||||
AuthAction.CREATE_ANY,
|
||||
AuthAction.DELETE_ANY,
|
||||
]);
|
||||
expect(result[0].permissions[1].actions).toEqual([AuthAction.READ_ANY]);
|
||||
});
|
||||
|
||||
it('should normalize roles to uppercase when loading from disk', async () => {
|
||||
const apiKeyWithMixedCaseRoles = {
|
||||
...loadMockApiKey,
|
||||
roles: ['admin', 'Viewer', 'CONNECT'], // Mixed case roles
|
||||
};
|
||||
|
||||
vi.mocked(readdir).mockResolvedValue(['key1.json'] as any);
|
||||
vi.mocked(readFile).mockResolvedValueOnce(JSON.stringify(apiKeyWithMixedCaseRoles));
|
||||
|
||||
const result = await apiKeyService.loadAllFromDisk();
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
// All roles should be normalized to uppercase
|
||||
expect(result[0].roles).toEqual(['ADMIN', 'VIEWER', 'CONNECT']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadApiKeyFile', () => {
|
||||
it('should load and parse a valid API key file', async () => {
|
||||
vi.mocked(readFile).mockResolvedValue(JSON.stringify(mockApiKeyWithSecret));
|
||||
vi.mocked(readFile).mockResolvedValue(JSON.stringify(mockApiKey));
|
||||
|
||||
const result = await apiKeyService['loadApiKeyFile']('test.json');
|
||||
|
||||
expect(result).toEqual(mockApiKeyWithSecret);
|
||||
expect(result).toEqual(mockApiKey);
|
||||
expect(readFile).toHaveBeenCalledWith(join(mockBasePath, 'test.json'), 'utf8');
|
||||
});
|
||||
|
||||
@@ -592,7 +624,7 @@ describe('ApiKeyService', () => {
|
||||
expect.stringContaining('Error validating API key file test.json')
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining('An instance of ApiKeyWithSecret has failed the validation')
|
||||
expect.stringContaining('An instance of ApiKey has failed the validation')
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(expect.stringContaining('property key'));
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(expect.stringContaining('property id'));
|
||||
@@ -603,5 +635,150 @@ describe('ApiKeyService', () => {
|
||||
expect.stringContaining('property permissions')
|
||||
);
|
||||
});
|
||||
|
||||
it('should normalize legacy action formats when loading API keys', async () => {
|
||||
const legacyApiKey = {
|
||||
...mockApiKey,
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.DOCKER,
|
||||
actions: ['create', 'READ', 'Update', 'DELETE'], // Mixed case legacy verbs
|
||||
},
|
||||
{
|
||||
resource: Resource.VMS,
|
||||
actions: ['READ_ANY', 'UPDATE_OWN'], // GraphQL enum style
|
||||
},
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: ['read:own', 'update:any'], // Casbin colon format
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
vi.mocked(readFile).mockResolvedValue(JSON.stringify(legacyApiKey));
|
||||
|
||||
const result = await apiKeyService['loadApiKeyFile']('legacy.json');
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.permissions).toEqual([
|
||||
{
|
||||
resource: Resource.DOCKER,
|
||||
actions: [
|
||||
AuthAction.CREATE_ANY,
|
||||
AuthAction.READ_ANY,
|
||||
AuthAction.UPDATE_ANY,
|
||||
AuthAction.DELETE_ANY,
|
||||
],
|
||||
},
|
||||
{
|
||||
resource: Resource.VMS,
|
||||
actions: [AuthAction.READ_ANY, AuthAction.UPDATE_OWN],
|
||||
},
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthAction.READ_OWN, AuthAction.UPDATE_ANY],
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('convertRolesStringArrayToRoles', () => {
|
||||
beforeEach(async () => {
|
||||
vi.mocked(getters.paths).mockReturnValue({
|
||||
'auth-keys': mockBasePath,
|
||||
} as ReturnType<typeof getters.paths>);
|
||||
|
||||
// Create a fresh mock logger for each test
|
||||
mockLogger = {
|
||||
log: vi.fn(),
|
||||
error: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
verbose: vi.fn(),
|
||||
};
|
||||
|
||||
apiKeyService = new ApiKeyService();
|
||||
// Replace the logger with our mock
|
||||
(apiKeyService as any).logger = mockLogger;
|
||||
});
|
||||
|
||||
it('should convert uppercase role strings to Role enum values', () => {
|
||||
const roles = ['ADMIN', 'CONNECT', 'VIEWER'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.VIEWER]);
|
||||
});
|
||||
|
||||
it('should convert lowercase role strings to Role enum values', () => {
|
||||
const roles = ['admin', 'connect', 'guest'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.GUEST]);
|
||||
});
|
||||
|
||||
it('should convert mixed case role strings to Role enum values', () => {
|
||||
const roles = ['Admin', 'CoNnEcT', 'ViEwEr'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.VIEWER]);
|
||||
});
|
||||
|
||||
it('should handle roles with whitespace', () => {
|
||||
const roles = [' ADMIN ', ' CONNECT ', 'VIEWER '];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.VIEWER]);
|
||||
});
|
||||
|
||||
it('should filter out invalid roles and warn', () => {
|
||||
const roles = ['ADMIN', 'INVALID_ROLE', 'VIEWER', 'ANOTHER_INVALID'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.VIEWER]);
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
'Ignoring invalid roles: INVALID_ROLE, ANOTHER_INVALID'
|
||||
);
|
||||
});
|
||||
|
||||
it('should return empty array when all roles are invalid', () => {
|
||||
const roles = ['INVALID1', 'INVALID2', 'INVALID3'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([]);
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
'Ignoring invalid roles: INVALID1, INVALID2, INVALID3'
|
||||
);
|
||||
});
|
||||
|
||||
it('should return empty array for empty input', () => {
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles([]);
|
||||
|
||||
expect(result).toEqual([]);
|
||||
expect(mockLogger.warn).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle all valid Role enum values', () => {
|
||||
const roles = Object.values(Role);
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual(Object.values(Role));
|
||||
expect(mockLogger.warn).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should deduplicate roles', () => {
|
||||
const roles = ['ADMIN', 'admin', 'ADMIN', 'VIEWER', 'viewer'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
// Note: Current implementation doesn't deduplicate, but this test documents the behavior
|
||||
expect(result).toEqual([Role.ADMIN, Role.ADMIN, Role.ADMIN, Role.VIEWER, Role.VIEWER]);
|
||||
});
|
||||
|
||||
it('should handle mixed valid and invalid roles correctly', () => {
|
||||
const roles = ['ADMIN', 'invalid', 'CONNECT', 'bad_role', 'GUEST', 'VIEWER'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.GUEST, Role.VIEWER]);
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith('Ignoring invalid roles: invalid, bad_role');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -3,12 +3,12 @@ import crypto from 'crypto';
|
||||
import { readdir, readFile, unlink, writeFile } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
|
||||
import { Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { normalizeLegacyActions } from '@unraid/shared/util/permissions.js';
|
||||
import { watch } from 'chokidar';
|
||||
import { ValidationError } from 'class-validator';
|
||||
import { ensureDirSync } from 'fs-extra';
|
||||
import { GraphQLError } from 'graphql';
|
||||
import { AuthActionVerb } from 'nest-authz';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
|
||||
import { environment } from '@app/environment.js';
|
||||
@@ -16,7 +16,6 @@ import { getters } from '@app/store/index.js';
|
||||
import {
|
||||
AddPermissionInput,
|
||||
ApiKey,
|
||||
ApiKeyWithSecret,
|
||||
Permission,
|
||||
} from '@app/unraid-api/graph/resolvers/api-key/api-key.model.js';
|
||||
import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';
|
||||
@@ -26,7 +25,7 @@ import { batchProcess } from '@app/utils.js';
|
||||
export class ApiKeyService implements OnModuleInit {
|
||||
private readonly logger = new Logger(ApiKeyService.name);
|
||||
protected readonly basePath: string;
|
||||
protected memoryApiKeys: Array<ApiKeyWithSecret> = [];
|
||||
protected memoryApiKeys: Array<ApiKey> = [];
|
||||
private static readonly validRoles: Set<Role> = new Set(Object.values(Role));
|
||||
|
||||
constructor() {
|
||||
@@ -36,23 +35,31 @@ export class ApiKeyService implements OnModuleInit {
|
||||
|
||||
async onModuleInit() {
|
||||
this.memoryApiKeys = await this.loadAllFromDisk();
|
||||
await this.cleanupLegacyInternalKeys();
|
||||
if (environment.IS_MAIN_PROCESS) {
|
||||
this.setupWatch();
|
||||
}
|
||||
}
|
||||
|
||||
public convertApiKeyWithSecretToApiKey(key: ApiKeyWithSecret): ApiKey {
|
||||
const { key: _, ...rest } = key;
|
||||
return rest;
|
||||
private async cleanupLegacyInternalKeys() {
|
||||
const legacyNames = ['CliInternal', 'ConnectInternal'];
|
||||
const keysToDelete = this.memoryApiKeys.filter((key) => legacyNames.includes(key.name));
|
||||
|
||||
if (keysToDelete.length > 0) {
|
||||
try {
|
||||
await this.deleteApiKeys(keysToDelete.map((key) => key.id));
|
||||
this.logger.log(`Cleaned up ${keysToDelete.length} legacy internal keys`);
|
||||
} catch (error) {
|
||||
this.logger.debug(
|
||||
error,
|
||||
`Failed to delete legacy internal keys: ${keysToDelete.map((key) => key.name).join(', ')}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async findAll(): Promise<ApiKey[]> {
|
||||
return Promise.all(
|
||||
this.memoryApiKeys.map(async (key) => {
|
||||
const keyWithoutSecret = this.convertApiKeyWithSecretToApiKey(key);
|
||||
return keyWithoutSecret;
|
||||
})
|
||||
);
|
||||
return this.memoryApiKeys;
|
||||
}
|
||||
|
||||
private setupWatch() {
|
||||
@@ -76,17 +83,18 @@ export class ApiKeyService implements OnModuleInit {
|
||||
public getAllValidPermissions(): Permission[] {
|
||||
return Object.values(Resource).map((res) => ({
|
||||
resource: res,
|
||||
actions: Object.values(AuthActionVerb),
|
||||
actions: Object.values(AuthAction),
|
||||
}));
|
||||
}
|
||||
|
||||
public convertPermissionsStringArrayToPermissions(permissions: string[]): Permission[] {
|
||||
return permissions.reduce<Array<Permission>>((acc, permission) => {
|
||||
const [resource, action] = permission.split(':');
|
||||
const [resource, ...actionParts] = permission.split(':');
|
||||
const action = actionParts.join(':'); // Handle actions like "read:any"
|
||||
const validatedResource = Resource[resource.toUpperCase() as keyof typeof Resource] ?? null;
|
||||
// Pull the actual enum value from the graphql schema
|
||||
const validatedAction =
|
||||
AuthActionVerb[action.toUpperCase() as keyof typeof AuthActionVerb] ?? null;
|
||||
AuthAction[action.toUpperCase().replace(':', '_') as keyof typeof AuthAction] ?? null;
|
||||
if (validatedAction && validatedResource) {
|
||||
const existingEntry = acc.find((p) => p.resource === validatedResource);
|
||||
if (existingEntry) {
|
||||
@@ -102,9 +110,25 @@ export class ApiKeyService implements OnModuleInit {
|
||||
}
|
||||
|
||||
public convertRolesStringArrayToRoles(roles: string[]): Role[] {
|
||||
return roles
|
||||
.map((roleStr) => Role[roleStr.trim().toUpperCase() as keyof typeof Role])
|
||||
.filter(Boolean);
|
||||
const validRoles: Role[] = [];
|
||||
const invalidRoles: string[] = [];
|
||||
|
||||
for (const roleStr of roles) {
|
||||
const upperRole = roleStr.trim().toUpperCase();
|
||||
const role = Role[upperRole as keyof typeof Role];
|
||||
|
||||
if (role && ApiKeyService.validRoles.has(role)) {
|
||||
validRoles.push(role);
|
||||
} else {
|
||||
invalidRoles.push(roleStr);
|
||||
}
|
||||
}
|
||||
|
||||
if (invalidRoles.length > 0) {
|
||||
this.logger.warn(`Ignoring invalid roles: ${invalidRoles.join(', ')}`);
|
||||
}
|
||||
|
||||
return validRoles;
|
||||
}
|
||||
|
||||
async create({
|
||||
@@ -119,7 +143,7 @@ export class ApiKeyService implements OnModuleInit {
|
||||
roles?: Role[];
|
||||
permissions?: Permission[] | AddPermissionInput[];
|
||||
overwrite?: boolean;
|
||||
}): Promise<ApiKeyWithSecret> {
|
||||
}): Promise<ApiKey> {
|
||||
const trimmedName = name?.trim();
|
||||
const sanitizedName = this.sanitizeName(trimmedName);
|
||||
|
||||
@@ -139,7 +163,7 @@ export class ApiKeyService implements OnModuleInit {
|
||||
if (!overwrite && existingKey) {
|
||||
return existingKey;
|
||||
}
|
||||
const apiKey: Partial<ApiKeyWithSecret> = {
|
||||
const apiKey: Partial<ApiKey> = {
|
||||
id: uuidv4(),
|
||||
key: this.generateApiKey(),
|
||||
name: sanitizedName,
|
||||
@@ -152,18 +176,18 @@ export class ApiKeyService implements OnModuleInit {
|
||||
// Update createdAt date
|
||||
apiKey.createdAt = new Date().toISOString();
|
||||
|
||||
await this.saveApiKey(apiKey as ApiKeyWithSecret);
|
||||
await this.saveApiKey(apiKey as ApiKey);
|
||||
|
||||
return apiKey as ApiKeyWithSecret;
|
||||
return apiKey as ApiKey;
|
||||
}
|
||||
|
||||
async loadAllFromDisk(): Promise<ApiKeyWithSecret[]> {
|
||||
async loadAllFromDisk(): Promise<ApiKey[]> {
|
||||
const files = await readdir(this.basePath).catch((error) => {
|
||||
this.logger.error(`Failed to read API key directory: ${error}`);
|
||||
throw new Error('Failed to list API keys');
|
||||
});
|
||||
|
||||
const apiKeys: ApiKeyWithSecret[] = [];
|
||||
const apiKeys: ApiKey[] = [];
|
||||
const jsonFiles = files.filter((file) => file.includes('.json'));
|
||||
|
||||
for (const file of jsonFiles) {
|
||||
@@ -186,7 +210,7 @@ export class ApiKeyService implements OnModuleInit {
|
||||
* @param file The file to load
|
||||
* @returns The API key with secret
|
||||
*/
|
||||
private async loadApiKeyFile(file: string): Promise<ApiKeyWithSecret | null> {
|
||||
private async loadApiKeyFile(file: string): Promise<ApiKey | null> {
|
||||
try {
|
||||
const content = await readFile(join(this.basePath, file), 'utf8');
|
||||
|
||||
@@ -196,7 +220,17 @@ export class ApiKeyService implements OnModuleInit {
|
||||
if (parsedContent.roles) {
|
||||
parsedContent.roles = parsedContent.roles.map((role: string) => role.toUpperCase());
|
||||
}
|
||||
return await validateObject(ApiKeyWithSecret, parsedContent);
|
||||
|
||||
// Normalize permission actions to AuthAction enum values
|
||||
// Uses shared helper to handle all legacy formats
|
||||
if (parsedContent.permissions) {
|
||||
parsedContent.permissions = parsedContent.permissions.map((permission: any) => ({
|
||||
...permission,
|
||||
actions: normalizeLegacyActions(permission.actions || []),
|
||||
}));
|
||||
}
|
||||
|
||||
return await validateObject(ApiKey, parsedContent);
|
||||
} catch (error) {
|
||||
if (error instanceof SyntaxError) {
|
||||
this.logger.error(`Corrupted key file: ${file}`);
|
||||
@@ -216,12 +250,7 @@ export class ApiKeyService implements OnModuleInit {
|
||||
|
||||
async findById(id: string): Promise<ApiKey | null> {
|
||||
try {
|
||||
const key = this.findByField('id', id);
|
||||
|
||||
if (key) {
|
||||
return this.convertApiKeyWithSecretToApiKey(key);
|
||||
}
|
||||
return null;
|
||||
return this.findByField('id', id);
|
||||
} catch (error) {
|
||||
if (error instanceof ValidationError) {
|
||||
this.logApiKeyValidationError(id, error);
|
||||
@@ -231,17 +260,13 @@ export class ApiKeyService implements OnModuleInit {
|
||||
}
|
||||
}
|
||||
|
||||
public findByIdWithSecret(id: string): ApiKeyWithSecret | null {
|
||||
return this.findByField('id', id);
|
||||
}
|
||||
|
||||
public findByField(field: keyof ApiKeyWithSecret, value: string): ApiKeyWithSecret | null {
|
||||
public findByField(field: keyof ApiKey, value: string): ApiKey | null {
|
||||
if (!value) return null;
|
||||
|
||||
return this.memoryApiKeys.find((k) => k[field] === value) ?? null;
|
||||
}
|
||||
|
||||
findByKey(key: string): ApiKeyWithSecret | null {
|
||||
findByKey(key: string): ApiKey | null {
|
||||
return this.findByField('key', key);
|
||||
}
|
||||
|
||||
@@ -254,9 +279,9 @@ export class ApiKeyService implements OnModuleInit {
|
||||
Errors: ${JSON.stringify(error.constraints, null, 2)}`);
|
||||
}
|
||||
|
||||
public async saveApiKey(apiKey: ApiKeyWithSecret): Promise<void> {
|
||||
public async saveApiKey(apiKey: ApiKey): Promise<void> {
|
||||
try {
|
||||
const validatedApiKey = await validateObject(ApiKeyWithSecret, apiKey);
|
||||
const validatedApiKey = await validateObject(ApiKey, apiKey);
|
||||
if (!validatedApiKey.permissions?.length && !validatedApiKey.roles?.length) {
|
||||
throw new GraphQLError('At least one of permissions or roles must be specified');
|
||||
}
|
||||
@@ -266,7 +291,7 @@ export class ApiKeyService implements OnModuleInit {
|
||||
.reduce((acc, key) => {
|
||||
acc[key] = validatedApiKey[key];
|
||||
return acc;
|
||||
}, {} as ApiKeyWithSecret);
|
||||
}, {} as ApiKey);
|
||||
|
||||
await writeFile(
|
||||
join(this.basePath, `${validatedApiKey.id}.json`),
|
||||
@@ -334,8 +359,8 @@ export class ApiKeyService implements OnModuleInit {
|
||||
description?: string;
|
||||
roles?: Role[];
|
||||
permissions?: Permission[] | AddPermissionInput[];
|
||||
}): Promise<ApiKeyWithSecret> {
|
||||
const apiKey = this.findByIdWithSecret(id);
|
||||
}): Promise<ApiKey> {
|
||||
const apiKey = await this.findById(id);
|
||||
if (!apiKey) {
|
||||
throw new GraphQLError('API key not found');
|
||||
}
|
||||
@@ -345,13 +370,15 @@ export class ApiKeyService implements OnModuleInit {
|
||||
if (description !== undefined) {
|
||||
apiKey.description = description;
|
||||
}
|
||||
if (roles) {
|
||||
if (roles !== undefined) {
|
||||
// Handle both empty array (to clear roles) and populated array
|
||||
if (roles.some((role) => !ApiKeyService.validRoles.has(role))) {
|
||||
throw new GraphQLError('Invalid role specified');
|
||||
}
|
||||
apiKey.roles = roles;
|
||||
}
|
||||
if (permissions) {
|
||||
if (permissions !== undefined) {
|
||||
// Handle both empty array (to clear permissions) and populated array
|
||||
apiKey.permissions = permissions;
|
||||
}
|
||||
await this.saveApiKey(apiKey);
|
||||
|
||||
@@ -11,13 +11,19 @@ import { BASE_POLICY, CASBIN_MODEL } from '@app/unraid-api/auth/casbin/index.js'
|
||||
import { CookieService, SESSION_COOKIE_CONFIG } from '@app/unraid-api/auth/cookie.service.js';
|
||||
import { UserCookieStrategy } from '@app/unraid-api/auth/cookie.strategy.js';
|
||||
import { ServerHeaderStrategy } from '@app/unraid-api/auth/header.strategy.js';
|
||||
import { AdminKeyService } from '@app/unraid-api/cli/admin-key.service.js';
|
||||
import { LocalSessionLifecycleService } from '@app/unraid-api/auth/local-session-lifecycle.service.js';
|
||||
import { LocalSessionService } from '@app/unraid-api/auth/local-session.service.js';
|
||||
import { LocalSessionStrategy } from '@app/unraid-api/auth/local-session.strategy.js';
|
||||
import { getRequest } from '@app/utils.js';
|
||||
|
||||
@Module({
|
||||
imports: [
|
||||
PassportModule.register({
|
||||
defaultStrategy: [ServerHeaderStrategy.key, UserCookieStrategy.key],
|
||||
defaultStrategy: [
|
||||
ServerHeaderStrategy.key,
|
||||
LocalSessionStrategy.key,
|
||||
UserCookieStrategy.key,
|
||||
],
|
||||
}),
|
||||
CasbinModule,
|
||||
AuthZModule.register({
|
||||
@@ -51,10 +57,12 @@ import { getRequest } from '@app/utils.js';
|
||||
providers: [
|
||||
AuthService,
|
||||
ApiKeyService,
|
||||
AdminKeyService,
|
||||
ServerHeaderStrategy,
|
||||
LocalSessionStrategy,
|
||||
UserCookieStrategy,
|
||||
CookieService,
|
||||
LocalSessionService,
|
||||
LocalSessionLifecycleService,
|
||||
{
|
||||
provide: SESSION_COOKIE_CONFIG,
|
||||
useValue: CookieService.defaultOpts(),
|
||||
@@ -65,8 +73,11 @@ import { getRequest } from '@app/utils.js';
|
||||
ApiKeyService,
|
||||
PassportModule,
|
||||
ServerHeaderStrategy,
|
||||
LocalSessionStrategy,
|
||||
UserCookieStrategy,
|
||||
CookieService,
|
||||
LocalSessionService,
|
||||
LocalSessionLifecycleService,
|
||||
AuthZModule,
|
||||
],
|
||||
})
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
import { UnauthorizedException } from '@nestjs/common';
|
||||
|
||||
import { Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { newEnforcer } from 'casbin';
|
||||
import { AuthActionVerb, AuthZService } from 'nest-authz';
|
||||
import { AuthZService } from 'nest-authz';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
|
||||
import { AuthService } from '@app/unraid-api/auth/auth.service.js';
|
||||
import { CookieService } from '@app/unraid-api/auth/cookie.service.js';
|
||||
import { ApiKey, ApiKeyWithSecret } from '@app/unraid-api/graph/resolvers/api-key/api-key.model.js';
|
||||
import { LocalSessionService } from '@app/unraid-api/auth/local-session.service.js';
|
||||
import { ApiKey } from '@app/unraid-api/graph/resolvers/api-key/api-key.model.js';
|
||||
import { UserAccount } from '@app/unraid-api/graph/user/user.model.js';
|
||||
import { FastifyRequest } from '@app/unraid-api/types/fastify.js';
|
||||
|
||||
@@ -17,17 +18,9 @@ describe('AuthService', () => {
|
||||
let apiKeyService: ApiKeyService;
|
||||
let authzService: AuthZService;
|
||||
let cookieService: CookieService;
|
||||
let localSessionService: LocalSessionService;
|
||||
|
||||
const mockApiKey: ApiKey = {
|
||||
id: '10f356da-1e9e-43b8-9028-a26a645539a6',
|
||||
name: 'Test API Key',
|
||||
description: 'Test API Key Description',
|
||||
roles: [Role.GUEST, Role.CONNECT],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
|
||||
const mockApiKeyWithSecret: ApiKeyWithSecret = {
|
||||
id: 'test-api-id',
|
||||
key: 'test-api-key',
|
||||
name: 'Test API Key',
|
||||
@@ -36,7 +29,7 @@ describe('AuthService', () => {
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthActionVerb.READ.toUpperCase()],
|
||||
actions: [AuthAction.READ_ANY],
|
||||
},
|
||||
],
|
||||
createdAt: new Date().toISOString(),
|
||||
@@ -64,7 +57,10 @@ describe('AuthService', () => {
|
||||
apiKeyService = new ApiKeyService();
|
||||
authzService = new AuthZService(enforcer);
|
||||
cookieService = new CookieService();
|
||||
authService = new AuthService(cookieService, apiKeyService, authzService);
|
||||
localSessionService = {
|
||||
validateLocalSession: vi.fn(),
|
||||
} as any;
|
||||
authService = new AuthService(cookieService, apiKeyService, localSessionService, authzService);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
@@ -98,6 +94,43 @@ describe('AuthService', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should validate API key with only permissions (no roles)', async () => {
|
||||
const apiKeyWithOnlyPermissions: ApiKey = {
|
||||
...mockApiKey,
|
||||
roles: [], // No roles, only permissions
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.DOCKER,
|
||||
actions: [AuthAction.READ_ANY, AuthAction.UPDATE_ANY],
|
||||
},
|
||||
{
|
||||
resource: Resource.VMS,
|
||||
actions: [AuthAction.READ_ANY],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
vi.spyOn(apiKeyService, 'findByKey').mockResolvedValue(apiKeyWithOnlyPermissions);
|
||||
vi.spyOn(authService, 'syncApiKeyRoles').mockResolvedValue(undefined);
|
||||
vi.spyOn(authService, 'syncApiKeyPermissions').mockResolvedValue(undefined);
|
||||
vi.spyOn(authzService, 'getRolesForUser').mockResolvedValue([]);
|
||||
|
||||
const result = await authService.validateApiKeyCasbin('test-api-key');
|
||||
|
||||
expect(result).toEqual({
|
||||
id: apiKeyWithOnlyPermissions.id,
|
||||
name: apiKeyWithOnlyPermissions.name,
|
||||
description: apiKeyWithOnlyPermissions.description,
|
||||
roles: [],
|
||||
permissions: apiKeyWithOnlyPermissions.permissions,
|
||||
});
|
||||
expect(authService.syncApiKeyRoles).toHaveBeenCalledWith(apiKeyWithOnlyPermissions.id, []);
|
||||
expect(authService.syncApiKeyPermissions).toHaveBeenCalledWith(
|
||||
apiKeyWithOnlyPermissions.id,
|
||||
apiKeyWithOnlyPermissions.permissions
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw UnauthorizedException when session user is missing', async () => {
|
||||
vi.spyOn(cookieService, 'hasValidAuthCookie').mockResolvedValue(true);
|
||||
vi.spyOn(authService, 'getSessionUser').mockResolvedValue(null as unknown as UserAccount);
|
||||
@@ -195,10 +228,6 @@ describe('AuthService', () => {
|
||||
};
|
||||
|
||||
vi.spyOn(apiKeyService, 'findById').mockResolvedValue(mockApiKeyWithoutRole);
|
||||
vi.spyOn(apiKeyService, 'findByIdWithSecret').mockResolvedValue({
|
||||
...mockApiKeyWithSecret,
|
||||
roles: [Role.ADMIN],
|
||||
});
|
||||
vi.spyOn(apiKeyService, 'saveApiKey').mockResolvedValue();
|
||||
vi.spyOn(authzService, 'addRoleForUser').mockResolvedValue(true);
|
||||
|
||||
@@ -206,9 +235,8 @@ describe('AuthService', () => {
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(apiKeyService.findById).toHaveBeenCalledWith(apiKeyId);
|
||||
expect(apiKeyService.findByIdWithSecret).toHaveBeenCalledWith(apiKeyId);
|
||||
expect(apiKeyService.saveApiKey).toHaveBeenCalledWith({
|
||||
...mockApiKeyWithSecret,
|
||||
...mockApiKeyWithoutRole,
|
||||
roles: [Role.ADMIN, role],
|
||||
});
|
||||
expect(authzService.addRoleForUser).toHaveBeenCalledWith(apiKeyId, role);
|
||||
@@ -226,13 +254,8 @@ describe('AuthService', () => {
|
||||
describe('removeRoleFromApiKey', () => {
|
||||
it('should remove role from API key', async () => {
|
||||
const apiKey = { ...mockApiKey, roles: [Role.ADMIN, Role.GUEST] };
|
||||
const apiKeyWithSecret = {
|
||||
...mockApiKeyWithSecret,
|
||||
roles: [Role.ADMIN, Role.GUEST],
|
||||
};
|
||||
|
||||
vi.spyOn(apiKeyService, 'findById').mockResolvedValue(apiKey);
|
||||
vi.spyOn(apiKeyService, 'findByIdWithSecret').mockResolvedValue(apiKeyWithSecret);
|
||||
vi.spyOn(apiKeyService, 'saveApiKey').mockResolvedValue();
|
||||
vi.spyOn(authzService, 'deleteRoleForUser').mockResolvedValue(true);
|
||||
|
||||
@@ -240,9 +263,8 @@ describe('AuthService', () => {
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(apiKeyService.findById).toHaveBeenCalledWith(apiKey.id);
|
||||
expect(apiKeyService.findByIdWithSecret).toHaveBeenCalledWith(apiKey.id);
|
||||
expect(apiKeyService.saveApiKey).toHaveBeenCalledWith({
|
||||
...apiKeyWithSecret,
|
||||
...apiKey,
|
||||
roles: [Role.GUEST],
|
||||
});
|
||||
expect(authzService.deleteRoleForUser).toHaveBeenCalledWith(apiKey.id, Role.ADMIN);
|
||||
@@ -256,4 +278,229 @@ describe('AuthService', () => {
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('VIEWER role API_KEY access restriction', () => {
|
||||
it('should deny VIEWER role access to API_KEY resource', async () => {
|
||||
// Test that VIEWER role cannot access API_KEY resource
|
||||
const mockCasbinPermissions = Object.values(Resource)
|
||||
.filter((resource) => resource !== Resource.API_KEY)
|
||||
.map((resource) => ['VIEWER', resource, AuthAction.READ_ANY]);
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.VIEWER);
|
||||
|
||||
// VIEWER should have read access to all resources EXCEPT API_KEY
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBeGreaterThan(0);
|
||||
|
||||
// Should NOT have API_KEY in the permissions
|
||||
expect(result.has(Resource.API_KEY)).toBe(false);
|
||||
|
||||
// Should have read access to other resources
|
||||
expect(result.get(Resource.DOCKER)).toEqual([AuthAction.READ_ANY]);
|
||||
expect(result.get(Resource.ARRAY)).toEqual([AuthAction.READ_ANY]);
|
||||
expect(result.get(Resource.CONFIG)).toEqual([AuthAction.READ_ANY]);
|
||||
expect(result.get(Resource.ME)).toEqual([AuthAction.READ_ANY]);
|
||||
});
|
||||
|
||||
it('should allow ADMIN role access to API_KEY resource', async () => {
|
||||
// Test that ADMIN role CAN access API_KEY resource
|
||||
const mockCasbinPermissions = [
|
||||
['ADMIN', '*', '*'], // Admin has wildcard access
|
||||
];
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
// ADMIN should have access to API_KEY through wildcard
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.has(Resource.API_KEY)).toBe(true);
|
||||
expect(result.get(Resource.API_KEY)).toContain(AuthAction.CREATE_ANY);
|
||||
expect(result.get(Resource.API_KEY)).toContain(AuthAction.READ_ANY);
|
||||
expect(result.get(Resource.API_KEY)).toContain(AuthAction.UPDATE_ANY);
|
||||
expect(result.get(Resource.API_KEY)).toContain(AuthAction.DELETE_ANY);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getImplicitPermissionsForRole', () => {
|
||||
it('should return permissions for a role', async () => {
|
||||
const mockCasbinPermissions = [
|
||||
['ADMIN', 'DOCKER', 'READ'],
|
||||
['ADMIN', 'DOCKER', 'UPDATE'],
|
||||
['ADMIN', 'VMS', 'READ'],
|
||||
];
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBe(2);
|
||||
expect(result.get(Resource.DOCKER)).toEqual([AuthAction.READ_ANY, AuthAction.UPDATE_ANY]);
|
||||
expect(result.get(Resource.VMS)).toEqual([AuthAction.READ_ANY]);
|
||||
});
|
||||
|
||||
it('should handle wildcard permissions for admin role', async () => {
|
||||
const mockCasbinPermissions = [
|
||||
['ADMIN', '*', '*'],
|
||||
['ADMIN', 'ME', 'READ'], // Inherited from GUEST
|
||||
];
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBeGreaterThan(0);
|
||||
// Should have expanded CRUD actions with proper format for all resources
|
||||
expect(result.get(Resource.DOCKER)).toContain(AuthAction.CREATE_ANY);
|
||||
expect(result.get(Resource.DOCKER)).toContain(AuthAction.READ_ANY);
|
||||
expect(result.get(Resource.DOCKER)).toContain(AuthAction.UPDATE_ANY);
|
||||
expect(result.get(Resource.DOCKER)).toContain(AuthAction.DELETE_ANY);
|
||||
expect(result.get(Resource.VMS)).toContain(AuthAction.CREATE_ANY);
|
||||
expect(result.get(Resource.VMS)).toContain(AuthAction.READ_ANY);
|
||||
expect(result.get(Resource.VMS)).toContain(AuthAction.UPDATE_ANY);
|
||||
expect(result.get(Resource.VMS)).toContain(AuthAction.DELETE_ANY);
|
||||
expect(result.get(Resource.ME)).toContain(AuthAction.READ_ANY);
|
||||
expect(result.get(Resource.ME)).toContain(AuthAction.CREATE_ANY); // Also gets CRUD from wildcard
|
||||
expect(result.has('*' as any)).toBe(false); // Still shouldn't have literal wildcard
|
||||
});
|
||||
|
||||
it('should handle connect role with wildcard resource and specific action', async () => {
|
||||
const mockCasbinPermissions = [
|
||||
['CONNECT', '*', 'READ'],
|
||||
['CONNECT', 'CONNECT__REMOTE_ACCESS', 'UPDATE'],
|
||||
['CONNECT', 'ME', 'READ'], // Inherited from GUEST
|
||||
];
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.CONNECT);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBeGreaterThan(0);
|
||||
// All resources should have READ
|
||||
expect(result.get(Resource.DOCKER)).toContain(AuthAction.READ_ANY);
|
||||
expect(result.get(Resource.VMS)).toContain(AuthAction.READ_ANY);
|
||||
expect(result.get(Resource.ARRAY)).toContain(AuthAction.READ_ANY);
|
||||
// CONNECT__REMOTE_ACCESS should have both READ and UPDATE
|
||||
expect(result.get(Resource.CONNECT__REMOTE_ACCESS)).toContain(AuthAction.READ_ANY);
|
||||
expect(result.get(Resource.CONNECT__REMOTE_ACCESS)).toContain(AuthAction.UPDATE_ANY);
|
||||
});
|
||||
|
||||
it('should expand resource-specific wildcard actions to CRUD', async () => {
|
||||
const mockCasbinPermissions = [
|
||||
['DOCKER_MANAGER', 'DOCKER', '*'],
|
||||
['DOCKER_MANAGER', 'ARRAY', 'READ'],
|
||||
];
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
// Docker should have all CRUD actions with proper format
|
||||
expect(result.get(Resource.DOCKER)).toEqual(
|
||||
expect.arrayContaining([
|
||||
AuthAction.CREATE_ANY,
|
||||
AuthAction.READ_ANY,
|
||||
AuthAction.UPDATE_ANY,
|
||||
AuthAction.DELETE_ANY,
|
||||
])
|
||||
);
|
||||
// Array should only have READ
|
||||
expect(result.get(Resource.ARRAY)).toEqual([AuthAction.READ_ANY]);
|
||||
});
|
||||
|
||||
it('should skip invalid resources', async () => {
|
||||
const mockCasbinPermissions = [
|
||||
['ADMIN', 'INVALID_RESOURCE', 'READ'],
|
||||
['ADMIN', 'DOCKER', 'UPDATE'],
|
||||
['ADMIN', '', 'READ'],
|
||||
] as string[][];
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBe(1);
|
||||
expect(result.get(Resource.DOCKER)).toEqual([AuthAction.UPDATE_ANY]);
|
||||
});
|
||||
|
||||
it('should handle empty permissions', async () => {
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue([]);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle malformed permission entries', async () => {
|
||||
const mockCasbinPermissions = [
|
||||
['ADMIN'], // Too short
|
||||
['ADMIN', 'DOCKER'], // Missing action
|
||||
['ADMIN', 'DOCKER', 'READ', 'EXTRA'], // Extra fields are ok
|
||||
['ADMIN', 'VMS', 'UPDATE'],
|
||||
];
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBe(2);
|
||||
expect(result.get(Resource.DOCKER)).toEqual([AuthAction.READ_ANY]);
|
||||
expect(result.get(Resource.VMS)).toEqual([AuthAction.UPDATE_ANY]);
|
||||
});
|
||||
|
||||
it('should not duplicate actions for the same resource', async () => {
|
||||
const mockCasbinPermissions = [
|
||||
['ADMIN', 'DOCKER', 'READ'],
|
||||
['ADMIN', 'DOCKER', 'READ'],
|
||||
['ADMIN', 'DOCKER', 'UPDATE'],
|
||||
['ADMIN', 'DOCKER', 'UPDATE'],
|
||||
];
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBe(1);
|
||||
expect(result.get(Resource.DOCKER)).toEqual([AuthAction.READ_ANY, AuthAction.UPDATE_ANY]);
|
||||
});
|
||||
|
||||
it('should handle errors gracefully', async () => {
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockRejectedValue(
|
||||
new Error('Casbin error')
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,11 +1,19 @@
|
||||
import { Injectable, Logger, UnauthorizedException } from '@nestjs/common';
|
||||
import { timingSafeEqual } from 'node:crypto';
|
||||
|
||||
import { Role } from '@unraid/shared/graphql.model.js';
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import {
|
||||
convertPermissionSetsToArrays,
|
||||
expandWildcardAction,
|
||||
parseActionToAuthAction,
|
||||
reconcileWildcardPermissions,
|
||||
} from '@unraid/shared/util/permissions.js';
|
||||
import { AuthZService } from 'nest-authz';
|
||||
|
||||
import { getters } from '@app/store/index.js';
|
||||
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
|
||||
import { CookieService } from '@app/unraid-api/auth/cookie.service.js';
|
||||
import { LocalSessionService } from '@app/unraid-api/auth/local-session.service.js';
|
||||
import { Permission } from '@app/unraid-api/graph/resolvers/api-key/api-key.model.js';
|
||||
import { UserAccount } from '@app/unraid-api/graph/user/user.model.js';
|
||||
import { FastifyRequest } from '@app/unraid-api/types/fastify.js';
|
||||
@@ -18,6 +26,7 @@ export class AuthService {
|
||||
constructor(
|
||||
private cookieService: CookieService,
|
||||
private apiKeyService: ApiKeyService,
|
||||
private localSessionService: LocalSessionService,
|
||||
private authzService: AuthZService
|
||||
) {}
|
||||
|
||||
@@ -83,6 +92,30 @@ export class AuthService {
|
||||
}
|
||||
}
|
||||
|
||||
async validateLocalSession(localSessionToken: string): Promise<UserAccount> {
|
||||
try {
|
||||
const isValid = await this.localSessionService.validateLocalSession(localSessionToken);
|
||||
|
||||
if (!isValid) {
|
||||
throw new UnauthorizedException('Invalid local session token');
|
||||
}
|
||||
|
||||
// Local session has admin privileges
|
||||
const user = await this.getLocalSessionUser();
|
||||
|
||||
// Sync the user's roles before checking them
|
||||
await this.syncUserRoles(user.id, user.roles);
|
||||
|
||||
// Now get the updated roles
|
||||
const existingRoles = await this.authzService.getRolesForUser(user.id);
|
||||
this.logger.debug(`Local session user ${user.id} has roles: ${existingRoles}`);
|
||||
|
||||
return user;
|
||||
} catch (error: unknown) {
|
||||
handleAuthError(this.logger, 'Failed to validate local session', error);
|
||||
}
|
||||
}
|
||||
|
||||
public async syncApiKeyRoles(apiKeyId: string, roles: string[]): Promise<void> {
|
||||
try {
|
||||
// Get existing roles and convert to Set
|
||||
@@ -111,12 +144,36 @@ export class AuthService {
|
||||
await this.authzService.deletePermissionsForUser(apiKeyId);
|
||||
|
||||
// Create array of permission-action pairs for processing
|
||||
const permissionActions = permissions.flatMap((permission) =>
|
||||
(permission.actions || []).map((action) => ({
|
||||
resource: permission.resource,
|
||||
action,
|
||||
}))
|
||||
);
|
||||
// Filter out any permissions with empty or undefined resources
|
||||
const permissionActions = permissions
|
||||
.filter((permission) => permission.resource && permission.resource.trim() !== '')
|
||||
.flatMap((permission) =>
|
||||
(permission.actions || [])
|
||||
.filter((action) => action && String(action).trim() !== '')
|
||||
.flatMap((action) => {
|
||||
const actionStr = String(action);
|
||||
// Handle wildcard - expand to all CRUD actions
|
||||
if (actionStr === '*' || actionStr.toLowerCase() === '*') {
|
||||
return expandWildcardAction().map((expandedAction) => ({
|
||||
resource: permission.resource,
|
||||
action: expandedAction,
|
||||
}));
|
||||
}
|
||||
|
||||
// Use the shared helper to parse and validate the action
|
||||
const parsedAction = parseActionToAuthAction(actionStr);
|
||||
|
||||
// Only include valid AuthAction values
|
||||
return parsedAction
|
||||
? [
|
||||
{
|
||||
resource: permission.resource,
|
||||
action: parsedAction,
|
||||
},
|
||||
]
|
||||
: [];
|
||||
})
|
||||
);
|
||||
|
||||
const { errors, errorOccurred: errorOccured } = await batchProcess(
|
||||
permissionActions,
|
||||
@@ -144,15 +201,12 @@ export class AuthService {
|
||||
}
|
||||
|
||||
try {
|
||||
if (!apiKey.roles) {
|
||||
apiKey.roles = [];
|
||||
}
|
||||
if (!apiKey.roles.includes(role)) {
|
||||
const apiKeyWithSecret = await this.apiKeyService.findByIdWithSecret(apiKeyId);
|
||||
|
||||
if (!apiKeyWithSecret) {
|
||||
throw new UnauthorizedException('API key not found with secret');
|
||||
}
|
||||
|
||||
apiKeyWithSecret.roles.push(role);
|
||||
await this.apiKeyService.saveApiKey(apiKeyWithSecret);
|
||||
apiKey.roles.push(role);
|
||||
await this.apiKeyService.saveApiKey(apiKey);
|
||||
await this.authzService.addRoleForUser(apiKeyId, role);
|
||||
}
|
||||
|
||||
@@ -174,14 +228,11 @@ export class AuthService {
|
||||
}
|
||||
|
||||
try {
|
||||
const apiKeyWithSecret = await this.apiKeyService.findByIdWithSecret(apiKeyId);
|
||||
|
||||
if (!apiKeyWithSecret) {
|
||||
throw new UnauthorizedException('API key not found with secret');
|
||||
if (!apiKey.roles) {
|
||||
apiKey.roles = [];
|
||||
}
|
||||
|
||||
apiKeyWithSecret.roles = apiKeyWithSecret.roles.filter((r) => r !== role);
|
||||
await this.apiKeyService.saveApiKey(apiKeyWithSecret);
|
||||
apiKey.roles = apiKey.roles.filter((r) => r !== role);
|
||||
await this.apiKeyService.saveApiKey(apiKey);
|
||||
await this.authzService.deleteRoleForUser(apiKeyId, role);
|
||||
|
||||
return true;
|
||||
@@ -224,7 +275,67 @@ export class AuthService {
|
||||
}
|
||||
|
||||
public validateCsrfToken(token?: string): boolean {
|
||||
return Boolean(token) && token === getters.emhttp().var.csrfToken;
|
||||
if (!token) return false;
|
||||
const csrfToken = getters.emhttp().var.csrfToken;
|
||||
if (!csrfToken) return false;
|
||||
return timingSafeEqual(Buffer.from(token, 'utf-8'), Buffer.from(csrfToken, 'utf-8'));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get implicit permissions for a role (including inherited permissions)
|
||||
*/
|
||||
public async getImplicitPermissionsForRole(role: Role): Promise<Map<Resource, AuthAction[]>> {
|
||||
// Use Set internally for efficient deduplication, with '*' as a special key for wildcards
|
||||
const permissionsWithSets = new Map<Resource | '*', Set<AuthAction>>();
|
||||
|
||||
// Load permissions from Casbin, defaulting to empty array on error
|
||||
let casbinPermissions: string[][] = [];
|
||||
try {
|
||||
casbinPermissions = await this.authzService.getImplicitPermissionsForUser(role);
|
||||
} catch (error) {
|
||||
this.logger.error(`Failed to get permissions for role ${role}:`, error);
|
||||
}
|
||||
|
||||
// Parse the Casbin permissions format: [["role", "resource", "action"], ...]
|
||||
for (const perm of casbinPermissions) {
|
||||
if (perm.length < 3) continue;
|
||||
|
||||
const resourceStr = perm[1];
|
||||
const action = perm[2];
|
||||
|
||||
if (!resourceStr) continue;
|
||||
|
||||
// Skip invalid resources (except wildcard)
|
||||
if (resourceStr !== '*' && !Object.values(Resource).includes(resourceStr as Resource)) {
|
||||
this.logger.debug(`Skipping invalid resource from Casbin: ${resourceStr}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Initialize Set if needed
|
||||
if (!permissionsWithSets.has(resourceStr as Resource | '*')) {
|
||||
permissionsWithSets.set(resourceStr as Resource | '*', new Set());
|
||||
}
|
||||
|
||||
const actionsSet = permissionsWithSets.get(resourceStr as Resource | '*')!;
|
||||
|
||||
// Handle wildcard or parse to valid AuthAction
|
||||
if (action === '*') {
|
||||
// Expand wildcard action to CRUD operations
|
||||
expandWildcardAction().forEach((a) => actionsSet.add(a));
|
||||
} else {
|
||||
// Use shared helper to parse and validate action
|
||||
const parsedAction = parseActionToAuthAction(action);
|
||||
if (parsedAction) {
|
||||
actionsSet.add(parsedAction);
|
||||
} else {
|
||||
this.logger.debug(`Skipping invalid action from Casbin: ${action}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Reconcile wildcard permissions and convert to final format
|
||||
reconcileWildcardPermissions(permissionsWithSets);
|
||||
return convertPermissionSetsToArrays(permissionsWithSets);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -234,7 +345,7 @@ export class AuthService {
|
||||
* @returns a service account that represents the user session (i.e. a webgui user).
|
||||
*/
|
||||
async getSessionUser(): Promise<UserAccount> {
|
||||
this.logger.debug('getSessionUser called!');
|
||||
this.logger.verbose('getSessionUser called!');
|
||||
return {
|
||||
id: '-1',
|
||||
description: 'Session receives administrator permissions',
|
||||
@@ -243,4 +354,21 @@ export class AuthService {
|
||||
permissions: [],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a user object representing a local session.
|
||||
* Note: Does NOT perform validation.
|
||||
*
|
||||
* @returns a service account that represents the local session user (i.e. CLI/system operations).
|
||||
*/
|
||||
async getLocalSessionUser(): Promise<UserAccount> {
|
||||
this.logger.verbose('getLocalSessionUser called!');
|
||||
return {
|
||||
id: '-2',
|
||||
description: 'Local session receives administrator permissions for CLI/system operations',
|
||||
name: 'local-admin',
|
||||
roles: [Role.ADMIN],
|
||||
permissions: [],
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,6 +13,7 @@ import type { FastifyRequest } from '@app/unraid-api/types/fastify.js';
|
||||
import { apiLogger } from '@app/core/log.js';
|
||||
import { UserCookieStrategy } from '@app/unraid-api/auth/cookie.strategy.js';
|
||||
import { ServerHeaderStrategy } from '@app/unraid-api/auth/header.strategy.js';
|
||||
import { LocalSessionStrategy } from '@app/unraid-api/auth/local-session.strategy.js';
|
||||
import { IS_PUBLIC_ENDPOINT_KEY } from '@app/unraid-api/auth/public.decorator.js';
|
||||
|
||||
/**
|
||||
@@ -37,7 +38,7 @@ type GraphQLContext =
|
||||
|
||||
@Injectable()
|
||||
export class AuthenticationGuard
|
||||
extends AuthGuard([ServerHeaderStrategy.key, UserCookieStrategy.key])
|
||||
extends AuthGuard([ServerHeaderStrategy.key, LocalSessionStrategy.key, UserCookieStrategy.key])
|
||||
implements CanActivate
|
||||
{
|
||||
protected logger = new Logger(AuthenticationGuard.name);
|
||||
|
||||
@@ -12,7 +12,7 @@ g = _, _
|
||||
e = some(where (p.eft == allow))
|
||||
|
||||
[matchers]
|
||||
m = (regexMatch(r.sub, p.sub) || g(r.sub, p.sub)) && \
|
||||
regexMatch(lower(r.obj), lower(p.obj)) && \
|
||||
(regexMatch(lower(r.act), lower(p.act)) || p.act == '*' || regexMatch(lower(r.act), lower(concat(p.act, ':.*'))))
|
||||
m = (r.sub == p.sub || g(r.sub, p.sub)) && \
|
||||
(r.obj == p.obj || p.obj == '*') && \
|
||||
(r.act == p.act || p.act == '*')
|
||||
`;
|
||||
|
||||
566
api/src/unraid-api/auth/casbin/permissions-comprehensive.spec.ts
Normal file
566
api/src/unraid-api/auth/casbin/permissions-comprehensive.spec.ts
Normal file
@@ -0,0 +1,566 @@
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { Model as CasbinModel, newEnforcer, StringAdapter } from 'casbin';
|
||||
import { beforeEach, describe, expect, it } from 'vitest';
|
||||
|
||||
import { CASBIN_MODEL } from '@app/unraid-api/auth/casbin/model.js';
|
||||
import { BASE_POLICY } from '@app/unraid-api/auth/casbin/policy.js';
|
||||
|
||||
describe('Comprehensive Casbin Permissions Tests', () => {
|
||||
describe('All UsePermissions decorator combinations', () => {
|
||||
// Test all resource/action combinations used in the codebase
|
||||
const testCases = [
|
||||
// API_KEY permissions
|
||||
{
|
||||
resource: Resource.API_KEY,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST, Role.CONNECT],
|
||||
},
|
||||
{
|
||||
resource: Resource.API_KEY,
|
||||
action: AuthAction.CREATE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST, Role.CONNECT],
|
||||
},
|
||||
{
|
||||
resource: Resource.API_KEY,
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST, Role.CONNECT],
|
||||
},
|
||||
{
|
||||
resource: Resource.API_KEY,
|
||||
action: AuthAction.DELETE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST, Role.CONNECT],
|
||||
},
|
||||
|
||||
// PERMISSION resource (for listing possible permissions)
|
||||
{
|
||||
resource: Resource.PERMISSION,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
|
||||
// ARRAY permissions
|
||||
{
|
||||
resource: Resource.ARRAY,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.ARRAY,
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST],
|
||||
},
|
||||
|
||||
// CONFIG permissions
|
||||
{
|
||||
resource: Resource.CONFIG,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.CONFIG,
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST, Role.CONNECT],
|
||||
},
|
||||
|
||||
// DOCKER permissions
|
||||
{
|
||||
resource: Resource.DOCKER,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.DOCKER,
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST],
|
||||
},
|
||||
|
||||
// VMS permissions
|
||||
{
|
||||
resource: Resource.VMS,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.VMS,
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST],
|
||||
},
|
||||
|
||||
// FLASH permissions (includes rclone operations)
|
||||
{
|
||||
resource: Resource.FLASH,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.FLASH,
|
||||
action: AuthAction.CREATE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST, Role.CONNECT],
|
||||
},
|
||||
{
|
||||
resource: Resource.FLASH,
|
||||
action: AuthAction.DELETE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST, Role.CONNECT],
|
||||
},
|
||||
|
||||
// INFO permissions (system information)
|
||||
{
|
||||
resource: Resource.INFO,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
|
||||
// LOGS permissions
|
||||
{
|
||||
resource: Resource.LOGS,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
|
||||
// ME permissions (current user info)
|
||||
{
|
||||
resource: Resource.ME,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT, Role.GUEST],
|
||||
deniedRoles: [],
|
||||
},
|
||||
|
||||
// NOTIFICATIONS permissions
|
||||
{
|
||||
resource: Resource.NOTIFICATIONS,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
|
||||
// Other read-only resources for VIEWER
|
||||
{
|
||||
resource: Resource.DISK,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.DISPLAY,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.ONLINE,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.OWNER,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.REGISTRATION,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.SERVERS,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.SERVICES,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.SHARE,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.VARS,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.CUSTOMIZATIONS,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.ACTIVATION_CODE,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
|
||||
// CONNECT special permission for remote access
|
||||
{
|
||||
resource: Resource.CONNECT__REMOTE_ACCESS,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.CONNECT__REMOTE_ACCESS,
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.CONNECT],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST],
|
||||
},
|
||||
];
|
||||
|
||||
testCases.forEach(({ resource, action, allowedRoles, deniedRoles }) => {
|
||||
describe(`${resource} - ${action}`, () => {
|
||||
let enforcer: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
enforcer = await newEnforcer(model, adapter);
|
||||
});
|
||||
|
||||
allowedRoles.forEach((role) => {
|
||||
it(`should allow ${role} to ${action} ${resource}`, async () => {
|
||||
const result = await enforcer.enforce(role, resource, action);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
deniedRoles.forEach((role) => {
|
||||
it(`should deny ${role} to ${action} ${resource}`, async () => {
|
||||
const result = await enforcer.enforce(role, resource, action);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Action matching and normalization', () => {
|
||||
let enforcer: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
enforcer = await newEnforcer(model, adapter);
|
||||
});
|
||||
|
||||
it('should match actions exactly as stored (uppercase)', async () => {
|
||||
// Our policies store actions as uppercase (e.g., 'READ_ANY')
|
||||
// The matcher now requires exact matching for security
|
||||
|
||||
// Uppercase actions should work
|
||||
const adminUpperResult = await enforcer.enforce(
|
||||
Role.ADMIN,
|
||||
Resource.DOCKER,
|
||||
AuthAction.READ_ANY
|
||||
);
|
||||
expect(adminUpperResult).toBe(true);
|
||||
|
||||
const viewerUpperResult = await enforcer.enforce(
|
||||
Role.VIEWER,
|
||||
Resource.DOCKER,
|
||||
AuthAction.READ_ANY
|
||||
);
|
||||
expect(viewerUpperResult).toBe(true);
|
||||
|
||||
// For non-wildcard roles, lowercase actions won't match
|
||||
const viewerLowerResult = await enforcer.enforce(Role.VIEWER, Resource.DOCKER, 'read:any');
|
||||
expect(viewerLowerResult).toBe(false);
|
||||
|
||||
// Mixed case won't match for VIEWER either
|
||||
const viewerMixedResult = await enforcer.enforce(Role.VIEWER, Resource.DOCKER, 'Read_Any');
|
||||
expect(viewerMixedResult).toBe(false);
|
||||
|
||||
// GUEST also requires exact lowercase
|
||||
const guestUpperResult = await enforcer.enforce(Role.GUEST, Resource.ME, 'READ:ANY');
|
||||
expect(guestUpperResult).toBe(false);
|
||||
|
||||
const guestLowerResult = await enforcer.enforce(
|
||||
Role.GUEST,
|
||||
Resource.ME,
|
||||
AuthAction.READ_ANY
|
||||
);
|
||||
expect(guestLowerResult).toBe(true);
|
||||
});
|
||||
|
||||
it('should allow wildcard actions for ADMIN regardless of case', async () => {
|
||||
// ADMIN has wildcard permissions (*, *, *) which match any action
|
||||
const adminWildcardActions = [
|
||||
'read:any',
|
||||
'create:any',
|
||||
'update:any',
|
||||
'delete:any',
|
||||
'READ:ANY', // Even uppercase works due to wildcard
|
||||
'ANYTHING', // Any action works due to wildcard
|
||||
];
|
||||
|
||||
for (const action of adminWildcardActions) {
|
||||
const result = await enforcer.enforce(Role.ADMIN, Resource.DOCKER, action);
|
||||
expect(result).toBe(true);
|
||||
}
|
||||
});
|
||||
|
||||
it('should NOT match different actions even with correct case', async () => {
|
||||
// VIEWER should not be able to UPDATE even with correct lowercase
|
||||
const result = await enforcer.enforce(Role.VIEWER, Resource.DOCKER, AuthAction.UPDATE_ANY);
|
||||
expect(result).toBe(false);
|
||||
|
||||
// VIEWER should not be able to DELETE
|
||||
const deleteResult = await enforcer.enforce(
|
||||
Role.VIEWER,
|
||||
Resource.DOCKER,
|
||||
AuthAction.DELETE_ANY
|
||||
);
|
||||
expect(deleteResult).toBe(false);
|
||||
|
||||
// VIEWER should not be able to CREATE
|
||||
const createResult = await enforcer.enforce(
|
||||
Role.VIEWER,
|
||||
Resource.DOCKER,
|
||||
AuthAction.CREATE_ANY
|
||||
);
|
||||
expect(createResult).toBe(false);
|
||||
});
|
||||
|
||||
it('should ensure actions are normalized when stored', async () => {
|
||||
// This test documents that our auth service normalizes actions to uppercase
|
||||
// when syncing permissions, ensuring consistency
|
||||
|
||||
// The BASE_POLICY uses AuthAction.READ_ANY which is 'READ_ANY' (uppercase)
|
||||
expect(BASE_POLICY).toContain('READ_ANY');
|
||||
expect(BASE_POLICY).not.toContain('read:any');
|
||||
|
||||
// All our stored policies should be uppercase
|
||||
const policies = await enforcer.getPolicy();
|
||||
for (const policy of policies) {
|
||||
const action = policy[2]; // Third element is the action
|
||||
if (action && action !== '*') {
|
||||
// All non-wildcard actions should be uppercase
|
||||
expect(action).toBe(action.toUpperCase());
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Wildcard permissions', () => {
|
||||
let enforcer: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
enforcer = await newEnforcer(model, adapter);
|
||||
});
|
||||
|
||||
it('should allow ADMIN wildcard access to all resources and actions', async () => {
|
||||
const resources = Object.values(Resource);
|
||||
const actions = [
|
||||
AuthAction.READ_ANY,
|
||||
AuthAction.CREATE_ANY,
|
||||
AuthAction.UPDATE_ANY,
|
||||
AuthAction.DELETE_ANY,
|
||||
];
|
||||
|
||||
for (const resource of resources) {
|
||||
for (const action of actions) {
|
||||
const result = await enforcer.enforce(Role.ADMIN, resource, action);
|
||||
expect(result).toBe(true);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
it('should allow CONNECT read access to most resources but NOT API_KEY', async () => {
|
||||
const resources = Object.values(Resource).filter(
|
||||
(r) => r !== Resource.CONNECT__REMOTE_ACCESS && r !== Resource.API_KEY
|
||||
);
|
||||
|
||||
for (const resource of resources) {
|
||||
// Should be able to read most resources
|
||||
const readResult = await enforcer.enforce(Role.CONNECT, resource, AuthAction.READ_ANY);
|
||||
expect(readResult).toBe(true);
|
||||
|
||||
// Should NOT be able to write (except CONNECT__REMOTE_ACCESS)
|
||||
const updateResult = await enforcer.enforce(
|
||||
Role.CONNECT,
|
||||
resource,
|
||||
AuthAction.UPDATE_ANY
|
||||
);
|
||||
expect(updateResult).toBe(false);
|
||||
}
|
||||
|
||||
// CONNECT should NOT be able to read API_KEY
|
||||
const apiKeyRead = await enforcer.enforce(
|
||||
Role.CONNECT,
|
||||
Resource.API_KEY,
|
||||
AuthAction.READ_ANY
|
||||
);
|
||||
expect(apiKeyRead).toBe(false);
|
||||
|
||||
// CONNECT should NOT be able to perform any action on API_KEY
|
||||
const apiKeyCreate = await enforcer.enforce(
|
||||
Role.CONNECT,
|
||||
Resource.API_KEY,
|
||||
AuthAction.CREATE_ANY
|
||||
);
|
||||
expect(apiKeyCreate).toBe(false);
|
||||
const apiKeyUpdate = await enforcer.enforce(
|
||||
Role.CONNECT,
|
||||
Resource.API_KEY,
|
||||
AuthAction.UPDATE_ANY
|
||||
);
|
||||
expect(apiKeyUpdate).toBe(false);
|
||||
const apiKeyDelete = await enforcer.enforce(
|
||||
Role.CONNECT,
|
||||
Resource.API_KEY,
|
||||
AuthAction.DELETE_ANY
|
||||
);
|
||||
expect(apiKeyDelete).toBe(false);
|
||||
|
||||
// Special case: CONNECT can update CONNECT__REMOTE_ACCESS
|
||||
const remoteAccessUpdate = await enforcer.enforce(
|
||||
Role.CONNECT,
|
||||
Resource.CONNECT__REMOTE_ACCESS,
|
||||
AuthAction.UPDATE_ANY
|
||||
);
|
||||
expect(remoteAccessUpdate).toBe(true);
|
||||
});
|
||||
|
||||
it('should explicitly deny CONNECT role from accessing API_KEY to prevent secret exposure', async () => {
|
||||
// CONNECT should NOT be able to read API_KEY (which would expose secrets)
|
||||
const apiKeyRead = await enforcer.enforce(
|
||||
Role.CONNECT,
|
||||
Resource.API_KEY,
|
||||
AuthAction.READ_ANY
|
||||
);
|
||||
expect(apiKeyRead).toBe(false);
|
||||
|
||||
// Verify all API_KEY operations are denied for CONNECT
|
||||
const actions = ['create:any', 'read:any', 'update:any', 'delete:any'];
|
||||
for (const action of actions) {
|
||||
const result = await enforcer.enforce(Role.CONNECT, Resource.API_KEY, action);
|
||||
expect(result).toBe(false);
|
||||
}
|
||||
|
||||
// Verify ADMIN can still access API_KEY
|
||||
const adminApiKeyRead = await enforcer.enforce(
|
||||
Role.ADMIN,
|
||||
Resource.API_KEY,
|
||||
AuthAction.READ_ANY
|
||||
);
|
||||
expect(adminApiKeyRead).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Role inheritance', () => {
|
||||
let enforcer: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
enforcer = await newEnforcer(model, adapter);
|
||||
});
|
||||
|
||||
it('should inherit GUEST permissions for VIEWER', async () => {
|
||||
// VIEWER inherits from GUEST, so should have ME access
|
||||
const result = await enforcer.enforce(Role.VIEWER, Resource.ME, AuthAction.READ_ANY);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should inherit GUEST permissions for CONNECT', async () => {
|
||||
// CONNECT inherits from GUEST, so should have ME access
|
||||
const result = await enforcer.enforce(Role.CONNECT, Resource.ME, AuthAction.READ_ANY);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should inherit GUEST permissions for ADMIN', async () => {
|
||||
// ADMIN inherits from GUEST, so should have ME access
|
||||
const result = await enforcer.enforce(Role.ADMIN, Resource.ME, AuthAction.READ_ANY);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge cases and security', () => {
|
||||
it('should deny access with empty action', async () => {
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
const result = await enforcer.enforce(Role.VIEWER, Resource.DOCKER, '');
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should deny access with empty resource', async () => {
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
const result = await enforcer.enforce(Role.VIEWER, '', AuthAction.READ_ANY);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should deny access with undefined role', async () => {
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
const result = await enforcer.enforce(
|
||||
'UNDEFINED_ROLE',
|
||||
Resource.DOCKER,
|
||||
AuthAction.READ_ANY
|
||||
);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should deny access with malformed action', async () => {
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
const malformedActions = [
|
||||
'read', // Missing possession
|
||||
':any', // Missing verb
|
||||
'read:', // Empty possession
|
||||
'read:own', // Different possession format
|
||||
'READ', // Uppercase without possession
|
||||
];
|
||||
|
||||
for (const action of malformedActions) {
|
||||
const result = await enforcer.enforce(Role.VIEWER, Resource.DOCKER, action);
|
||||
expect(result).toBe(false);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
147
api/src/unraid-api/auth/casbin/policy.spec.ts
Normal file
147
api/src/unraid-api/auth/casbin/policy.spec.ts
Normal file
@@ -0,0 +1,147 @@
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { Model as CasbinModel, newEnforcer, StringAdapter } from 'casbin';
|
||||
import { describe, expect, it } from 'vitest';
|
||||
|
||||
import { CASBIN_MODEL } from '@app/unraid-api/auth/casbin/model.js';
|
||||
import { BASE_POLICY } from '@app/unraid-api/auth/casbin/policy.js';
|
||||
|
||||
describe('Casbin Policy - VIEWER role restrictions', () => {
|
||||
it('should validate matcher does not allow empty policies', async () => {
|
||||
// Test that empty policies don't match everything
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
|
||||
// Test with a policy that has an empty object
|
||||
const emptyPolicy = `p, VIEWER, , ${AuthAction.READ_ANY}`;
|
||||
const adapter = new StringAdapter(emptyPolicy);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
// Empty policy should not match a real resource
|
||||
const canReadApiKey = await enforcer.enforce(Role.VIEWER, Resource.API_KEY, AuthAction.READ_ANY);
|
||||
expect(canReadApiKey).toBe(false);
|
||||
});
|
||||
|
||||
it('should deny VIEWER role access to API_KEY resource', async () => {
|
||||
// Create enforcer with actual policy
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
// Test that VIEWER cannot access API_KEY with any action
|
||||
const canReadApiKey = await enforcer.enforce(Role.VIEWER, Resource.API_KEY, AuthAction.READ_ANY);
|
||||
const canCreateApiKey = await enforcer.enforce(
|
||||
Role.VIEWER,
|
||||
Resource.API_KEY,
|
||||
AuthAction.CREATE_ANY
|
||||
);
|
||||
const canUpdateApiKey = await enforcer.enforce(
|
||||
Role.VIEWER,
|
||||
Resource.API_KEY,
|
||||
AuthAction.UPDATE_ANY
|
||||
);
|
||||
const canDeleteApiKey = await enforcer.enforce(
|
||||
Role.VIEWER,
|
||||
Resource.API_KEY,
|
||||
AuthAction.DELETE_ANY
|
||||
);
|
||||
|
||||
expect(canReadApiKey).toBe(false);
|
||||
expect(canCreateApiKey).toBe(false);
|
||||
expect(canUpdateApiKey).toBe(false);
|
||||
expect(canDeleteApiKey).toBe(false);
|
||||
});
|
||||
|
||||
it('should allow VIEWER role access to other resources', async () => {
|
||||
// Create enforcer with actual policy
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
// Test that VIEWER can read other resources
|
||||
const canReadDocker = await enforcer.enforce(Role.VIEWER, Resource.DOCKER, AuthAction.READ_ANY);
|
||||
const canReadArray = await enforcer.enforce(Role.VIEWER, Resource.ARRAY, AuthAction.READ_ANY);
|
||||
const canReadConfig = await enforcer.enforce(Role.VIEWER, Resource.CONFIG, AuthAction.READ_ANY);
|
||||
const canReadVms = await enforcer.enforce(Role.VIEWER, Resource.VMS, AuthAction.READ_ANY);
|
||||
|
||||
expect(canReadDocker).toBe(true);
|
||||
expect(canReadArray).toBe(true);
|
||||
expect(canReadConfig).toBe(true);
|
||||
expect(canReadVms).toBe(true);
|
||||
|
||||
// But VIEWER cannot write to these resources
|
||||
const canUpdateDocker = await enforcer.enforce(
|
||||
Role.VIEWER,
|
||||
Resource.DOCKER,
|
||||
AuthAction.UPDATE_ANY
|
||||
);
|
||||
const canDeleteArray = await enforcer.enforce(
|
||||
Role.VIEWER,
|
||||
Resource.ARRAY,
|
||||
AuthAction.DELETE_ANY
|
||||
);
|
||||
|
||||
expect(canUpdateDocker).toBe(false);
|
||||
expect(canDeleteArray).toBe(false);
|
||||
});
|
||||
|
||||
it('should allow ADMIN role full access to API_KEY resource', async () => {
|
||||
// Create enforcer with actual policy
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
// Test that ADMIN can access API_KEY with all actions
|
||||
const canReadApiKey = await enforcer.enforce(Role.ADMIN, Resource.API_KEY, AuthAction.READ_ANY);
|
||||
const canCreateApiKey = await enforcer.enforce(
|
||||
Role.ADMIN,
|
||||
Resource.API_KEY,
|
||||
AuthAction.CREATE_ANY
|
||||
);
|
||||
const canUpdateApiKey = await enforcer.enforce(
|
||||
Role.ADMIN,
|
||||
Resource.API_KEY,
|
||||
AuthAction.UPDATE_ANY
|
||||
);
|
||||
const canDeleteApiKey = await enforcer.enforce(
|
||||
Role.ADMIN,
|
||||
Resource.API_KEY,
|
||||
AuthAction.DELETE_ANY
|
||||
);
|
||||
|
||||
expect(canReadApiKey).toBe(true);
|
||||
expect(canCreateApiKey).toBe(true);
|
||||
expect(canUpdateApiKey).toBe(true);
|
||||
expect(canDeleteApiKey).toBe(true);
|
||||
});
|
||||
|
||||
it('should ensure VIEWER permissions exclude API_KEY in generated policy', () => {
|
||||
// Verify that the generated policy string doesn't contain VIEWER + API_KEY combination
|
||||
expect(BASE_POLICY).toContain(`p, ${Role.VIEWER}, ${Resource.DOCKER}, ${AuthAction.READ_ANY}`);
|
||||
expect(BASE_POLICY).toContain(`p, ${Role.VIEWER}, ${Resource.ARRAY}, ${AuthAction.READ_ANY}`);
|
||||
expect(BASE_POLICY).not.toContain(
|
||||
`p, ${Role.VIEWER}, ${Resource.API_KEY}, ${AuthAction.READ_ANY}`
|
||||
);
|
||||
|
||||
// Count VIEWER permissions - should be total resources minus API_KEY
|
||||
const viewerPermissionLines = BASE_POLICY.split('\n').filter((line) =>
|
||||
line.startsWith(`p, ${Role.VIEWER},`)
|
||||
);
|
||||
const totalResources = Object.values(Resource).length;
|
||||
expect(viewerPermissionLines.length).toBe(totalResources - 1); // All resources except API_KEY
|
||||
});
|
||||
|
||||
it('should inherit GUEST permissions for VIEWER role', async () => {
|
||||
// Create enforcer with actual policy
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
// VIEWER inherits from GUEST, so should have access to ME resource
|
||||
const canReadMe = await enforcer.enforce(Role.VIEWER, Resource.ME, AuthAction.READ_ANY);
|
||||
expect(canReadMe).toBe(true);
|
||||
});
|
||||
});
|
||||
@@ -1,18 +1,26 @@
|
||||
import { Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { AuthAction } from 'nest-authz';
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
|
||||
// Generate VIEWER permissions for all resources except API_KEY
|
||||
const viewerPermissions = Object.values(Resource)
|
||||
.filter((resource) => resource !== Resource.API_KEY)
|
||||
.map((resource) => `p, ${Role.VIEWER}, ${resource}, ${AuthAction.READ_ANY}`)
|
||||
.join('\n');
|
||||
|
||||
export const BASE_POLICY = `
|
||||
# Admin permissions
|
||||
# Admin permissions - full access
|
||||
p, ${Role.ADMIN}, *, *
|
||||
|
||||
# Connect Permissions
|
||||
p, ${Role.CONNECT}, *, ${AuthAction.READ_ANY}
|
||||
# Connect permissions - inherits from VIEWER plus can manage remote access
|
||||
p, ${Role.CONNECT}, ${Resource.CONNECT__REMOTE_ACCESS}, ${AuthAction.UPDATE_ANY}
|
||||
|
||||
# Guest permissions
|
||||
# Guest permissions - basic profile access
|
||||
p, ${Role.GUEST}, ${Resource.ME}, ${AuthAction.READ_ANY}
|
||||
|
||||
# Viewer permissions - read-only access to all resources except API_KEY
|
||||
${viewerPermissions}
|
||||
|
||||
# Role inheritance
|
||||
g, ${Role.ADMIN}, ${Role.GUEST}
|
||||
g, ${Role.CONNECT}, ${Role.GUEST}
|
||||
g, ${Role.CONNECT}, ${Role.VIEWER}
|
||||
g, ${Role.VIEWER}, ${Role.GUEST}
|
||||
`;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Inject, Injectable, Logger } from '@nestjs/common';
|
||||
import { readFile } from 'fs/promises';
|
||||
import { readdir, readFile } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
|
||||
import { fileExists } from '@app/core/utils/files/file-exists.js';
|
||||
@@ -9,7 +9,7 @@ import { batchProcess } from '@app/utils.js';
|
||||
/** token for dependency injection of a session cookie options object */
|
||||
export const SESSION_COOKIE_CONFIG = 'SESSION_COOKIE_CONFIG';
|
||||
|
||||
type SessionCookieConfig = {
|
||||
export type SessionCookieConfig = {
|
||||
namePrefix: string;
|
||||
sessionDir: string;
|
||||
secure: boolean;
|
||||
@@ -68,13 +68,17 @@ export class CookieService {
|
||||
}
|
||||
try {
|
||||
const sessionData = await readFile(sessionFile, 'ascii');
|
||||
return sessionData.includes('unraid_login') && sessionData.includes('unraid_user');
|
||||
return this.isSessionValid(sessionData);
|
||||
} catch (e) {
|
||||
this.logger.error(e, 'Error reading session file');
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private isSessionValid(sessionData: string): boolean {
|
||||
return sessionData.includes('unraid_login') && sessionData.includes('unraid_user');
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a session id, returns the full path to the session file on disk.
|
||||
*
|
||||
@@ -91,4 +95,33 @@ export class CookieService {
|
||||
const sanitizedSessionId = sessionId.replace(/[^a-zA-Z0-9]/g, '');
|
||||
return join(this.opts.sessionDir, `sess_${sanitizedSessionId}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the active session id, if any.
|
||||
* @returns the active session id, if any, or null if no active session is found.
|
||||
*/
|
||||
async getActiveSession(): Promise<string | null> {
|
||||
let sessionFiles: string[] = [];
|
||||
try {
|
||||
sessionFiles = await readdir(this.opts.sessionDir);
|
||||
} catch (e) {
|
||||
this.logger.warn(e, 'Error reading session directory');
|
||||
return null;
|
||||
}
|
||||
for (const sessionFile of sessionFiles) {
|
||||
if (!sessionFile.startsWith('sess_')) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const sessionData = await readFile(join(this.opts.sessionDir, sessionFile), 'ascii');
|
||||
if (this.isSessionValid(sessionData)) {
|
||||
return sessionFile.replace('sess_', '');
|
||||
}
|
||||
} catch {
|
||||
// Ignore unreadable files and continue scanning
|
||||
continue;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
21
api/src/unraid-api/auth/local-session-lifecycle.service.ts
Normal file
21
api/src/unraid-api/auth/local-session-lifecycle.service.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import { Injectable, OnModuleInit } from '@nestjs/common';
|
||||
|
||||
import { LocalSessionService } from '@app/unraid-api/auth/local-session.service.js';
|
||||
|
||||
/**
|
||||
* Service for managing the lifecycle of the local session.
|
||||
*
|
||||
* Used for tying the local session's lifecycle to the API's life, rather
|
||||
* than the LocalSessionService's lifecycle, since it may also be used by
|
||||
* other applications, like the CLI.
|
||||
*
|
||||
* This service is only used in the API, and not in the CLI.
|
||||
*/
|
||||
@Injectable()
|
||||
export class LocalSessionLifecycleService implements OnModuleInit {
|
||||
constructor(private readonly localSessionService: LocalSessionService) {}
|
||||
|
||||
async onModuleInit() {
|
||||
await this.localSessionService.generateLocalSession();
|
||||
}
|
||||
}
|
||||
97
api/src/unraid-api/auth/local-session.service.ts
Normal file
97
api/src/unraid-api/auth/local-session.service.ts
Normal file
@@ -0,0 +1,97 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { randomBytes, timingSafeEqual } from 'crypto';
|
||||
import { chmod, mkdir, readFile, unlink, writeFile } from 'fs/promises';
|
||||
import { dirname } from 'path';
|
||||
|
||||
import { PATHS_LOCAL_SESSION_FILE } from '@app/environment.js';
|
||||
|
||||
/**
|
||||
* Service that manages a local session file for internal CLI/system authentication.
|
||||
* Creates a secure token on startup that can be used for local system operations.
|
||||
*/
|
||||
@Injectable()
|
||||
export class LocalSessionService {
|
||||
private readonly logger = new Logger(LocalSessionService.name);
|
||||
private sessionToken: string | null = null;
|
||||
private static readonly SESSION_FILE_PATH = PATHS_LOCAL_SESSION_FILE;
|
||||
|
||||
/**
|
||||
* Generate a secure local session token and write it to file
|
||||
*/
|
||||
async generateLocalSession(): Promise<void> {
|
||||
// Generate a cryptographically secure random token
|
||||
this.sessionToken = randomBytes(32).toString('hex');
|
||||
|
||||
try {
|
||||
// Ensure directory exists
|
||||
await mkdir(dirname(LocalSessionService.getSessionFilePath()), { recursive: true });
|
||||
|
||||
// Write token to file
|
||||
await writeFile(LocalSessionService.getSessionFilePath(), this.sessionToken, {
|
||||
encoding: 'utf-8',
|
||||
mode: 0o600, // Owner read/write only
|
||||
});
|
||||
|
||||
// Ensure proper permissions (redundant but explicit)
|
||||
// Check if file exists first to handle race conditions in test environments
|
||||
await chmod(LocalSessionService.getSessionFilePath(), 0o600).catch((error) => {
|
||||
this.logger.warn(error, 'Failed to set permissions on local session file');
|
||||
});
|
||||
|
||||
this.logger.debug(`Local session written to ${LocalSessionService.getSessionFilePath()}`);
|
||||
} catch (error) {
|
||||
this.logger.error(`Failed to write local session: ${error}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read and return the current local session token from file
|
||||
*/
|
||||
public async getLocalSession(): Promise<string | null> {
|
||||
try {
|
||||
return await readFile(LocalSessionService.getSessionFilePath(), 'utf-8');
|
||||
} catch (error) {
|
||||
this.logger.warn(error, 'Local session file not found or not readable');
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate if a given token matches the current local session
|
||||
*/
|
||||
public async validateLocalSession(token: string): Promise<boolean> {
|
||||
// Coerce inputs to strings (or empty string if undefined)
|
||||
const tokenStr = token || '';
|
||||
const currentToken = await this.getLocalSession();
|
||||
const currentTokenStr = currentToken || '';
|
||||
|
||||
// Early return if either is empty
|
||||
if (!tokenStr || !currentTokenStr) return false;
|
||||
|
||||
// Create buffers
|
||||
const tokenBuffer = Buffer.from(tokenStr, 'utf-8');
|
||||
const currentTokenBuffer = Buffer.from(currentTokenStr, 'utf-8');
|
||||
|
||||
// Check length equality first to prevent timingSafeEqual from throwing
|
||||
if (tokenBuffer.length !== currentTokenBuffer.length) return false;
|
||||
|
||||
// Use constant-time comparison to prevent timing attacks
|
||||
return timingSafeEqual(tokenBuffer, currentTokenBuffer);
|
||||
}
|
||||
|
||||
public async deleteLocalSession(): Promise<void> {
|
||||
try {
|
||||
await unlink(LocalSessionService.getSessionFilePath());
|
||||
} catch (error) {
|
||||
this.logger.error(error, 'Error deleting local session file');
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Get the file path for the local session (useful for external readers)
 */
public static getSessionFilePath(): string {
    // Single point of truth for the path: both this service's own methods
    // and external readers resolve it through this static accessor.
    return LocalSessionService.SESSION_FILE_PATH;
}
|
||||
}
|
||||
46
api/src/unraid-api/auth/local-session.strategy.ts
Normal file
46
api/src/unraid-api/auth/local-session.strategy.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { PassportStrategy } from '@nestjs/passport';
|
||||
|
||||
import { Strategy } from 'passport-custom';
|
||||
|
||||
import { AuthService } from '@app/unraid-api/auth/auth.service.js';
|
||||
import { UserAccount } from '@app/unraid-api/graph/user/user.model.js';
|
||||
import { FastifyRequest } from '@app/unraid-api/types/fastify.js';
|
||||
|
||||
/**
|
||||
* Passport strategy for local session authentication.
|
||||
* Validates the x-local-session header for internal CLI/system operations.
|
||||
*/
|
||||
@Injectable()
|
||||
export class LocalSessionStrategy extends PassportStrategy(Strategy, 'local-session') {
|
||||
static readonly key = 'local-session';
|
||||
private readonly logger = new Logger(LocalSessionStrategy.name);
|
||||
|
||||
constructor(private readonly authService: AuthService) {
|
||||
super();
|
||||
}
|
||||
|
||||
async validate(request: FastifyRequest): Promise<UserAccount | null> {
|
||||
try {
|
||||
const localSessionToken = request.headers['x-local-session'] as string;
|
||||
|
||||
if (!localSessionToken) {
|
||||
this.logger.verbose('No local session token found in request headers');
|
||||
return null;
|
||||
}
|
||||
|
||||
this.logger.verbose('Attempting to validate local session token');
|
||||
const user = await this.authService.validateLocalSession(localSessionToken);
|
||||
|
||||
if (user) {
|
||||
this.logger.verbose(`Local session authenticated user: ${user.name}`);
|
||||
return user;
|
||||
}
|
||||
|
||||
return null;
|
||||
} catch (error) {
|
||||
this.logger.verbose(error, `Local session validation failed`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
192
api/src/unraid-api/cli/__test__/api-key.command.test.ts
Normal file
192
api/src/unraid-api/cli/__test__/api-key.command.test.ts
Normal file
@@ -0,0 +1,192 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import { InquirerService } from 'nest-commander';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
|
||||
import { AddApiKeyQuestionSet } from '@app/unraid-api/cli/apikey/add-api-key.questions.js';
|
||||
import { ApiKeyCommand } from '@app/unraid-api/cli/apikey/api-key.command.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
|
||||
// Unit tests for the `apikey` CLI command and its inquirer question set.
// ApiKeyService, LogService, and InquirerService are fully mocked; tests
// assert on the exact arguments the command forwards to the service layer.
describe('ApiKeyCommand', () => {
    let command: ApiKeyCommand;
    let apiKeyService: ApiKeyService;
    let logService: LogService;
    let inquirerService: InquirerService;
    let questionSet: AddApiKeyQuestionSet;

    beforeEach(async () => {
        const module: TestingModule = await Test.createTestingModule({
            providers: [
                ApiKeyCommand,
                AddApiKeyQuestionSet,
                {
                    provide: ApiKeyService,
                    useValue: {
                        findByField: vi.fn(),
                        create: vi.fn(),
                        findAll: vi.fn(),
                        deleteApiKeys: vi.fn(),
                        // Pass-through conversions so option parsing is observable as-is.
                        convertRolesStringArrayToRoles: vi.fn((roles) => roles),
                        convertPermissionsStringArrayToPermissions: vi.fn((perms) => perms),
                        getAllValidPermissions: vi.fn(() => []),
                    },
                },
                {
                    provide: LogService,
                    useValue: {
                        log: vi.fn(),
                        error: vi.fn(),
                    },
                },
                {
                    provide: InquirerService,
                    useValue: {
                        prompt: vi.fn(),
                    },
                },
            ],
        }).compile();

        command = module.get<ApiKeyCommand>(ApiKeyCommand);
        apiKeyService = module.get<ApiKeyService>(ApiKeyService);
        logService = module.get<LogService>(LogService);
        inquirerService = module.get<InquirerService>(InquirerService);
        questionSet = module.get<AddApiKeyQuestionSet>(AddApiKeyQuestionSet);
    });

    describe('AddApiKeyQuestionSet', () => {
        describe('shouldAskOverwrite', () => {
            it('should return true when an API key with the given name exists', () => {
                vi.mocked(apiKeyService.findByField).mockReturnValue({
                    key: 'existing-key',
                    name: 'test-key',
                    description: 'Test key',
                    roles: [],
                    permissions: [],
                } as any);

                const result = questionSet.shouldAskOverwrite({ name: 'test-key' });

                expect(result).toBe(true);
                expect(apiKeyService.findByField).toHaveBeenCalledWith('name', 'test-key');
            });

            it('should return false when no API key with the given name exists', () => {
                vi.mocked(apiKeyService.findByField).mockReturnValue(null);

                const result = questionSet.shouldAskOverwrite({ name: 'non-existent-key' });

                expect(result).toBe(false);
                expect(apiKeyService.findByField).toHaveBeenCalledWith('name', 'non-existent-key');
            });
        });
    });

    describe('run', () => {
        it('should find and return existing key when not creating', async () => {
            const mockKey = { key: 'test-api-key-123', name: 'test-key' };
            vi.mocked(apiKeyService.findByField).mockReturnValue(mockKey as any);

            await command.run([], { name: 'test-key', create: false });

            expect(apiKeyService.findByField).toHaveBeenCalledWith('name', 'test-key');
            expect(logService.log).toHaveBeenCalledWith('test-api-key-123');
        });

        it('should create new key when key does not exist and create flag is set', async () => {
            vi.mocked(apiKeyService.findByField).mockReturnValue(null);
            vi.mocked(apiKeyService.create).mockResolvedValue({ key: 'new-api-key-456' } as any);

            await command.run([], {
                name: 'new-key',
                create: true,
                roles: ['ADMIN'] as any,
                description: 'Test description',
            });

            expect(apiKeyService.create).toHaveBeenCalledWith({
                name: 'new-key',
                description: 'Test description',
                roles: ['ADMIN'],
                permissions: undefined,
                overwrite: false,
            });
            expect(logService.log).toHaveBeenCalledWith('new-api-key-456');
        });

        it('should error when key exists and overwrite is not set in non-interactive mode', async () => {
            const mockKey = { key: 'existing-key', name: 'test-key' };
            // run() looks the key up twice; the second (non-interactive
            // existence check) must see the key — presumably matches the
            // call order inside api-key.command.ts; verify if that changes.
            vi.mocked(apiKeyService.findByField)
                .mockReturnValueOnce(null) // first lookup in run()
                .mockReturnValueOnce(mockKey as any); // second lookup in non-interactive check
            // process.exit is stubbed to throw so the rejection is observable.
            const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => {
                throw new Error('process.exit');
            });

            await expect(
                command.run([], {
                    name: 'test-key',
                    create: true,
                    roles: ['ADMIN'] as any,
                })
            ).rejects.toThrow();

            expect(logService.error).toHaveBeenCalledWith(
                "API key with name 'test-key' already exists. Use --overwrite to replace it."
            );
            expect(exitSpy).toHaveBeenCalledWith(1);
            exitSpy.mockRestore();
        });

        it('should create key with overwrite when key exists and overwrite is set', async () => {
            const mockKey = { key: 'existing-key', name: 'test-key' };
            vi.mocked(apiKeyService.findByField)
                .mockReturnValueOnce(null) // first lookup in run()
                .mockReturnValueOnce(mockKey as any); // second lookup in non-interactive check
            vi.mocked(apiKeyService.create).mockResolvedValue({ key: 'overwritten-key' } as any);

            await command.run([], {
                name: 'test-key',
                create: true,
                roles: ['ADMIN'] as any,
                overwrite: true,
            });

            expect(apiKeyService.create).toHaveBeenCalledWith({
                name: 'test-key',
                // Default description is synthesized from the key name.
                description: 'CLI generated key: test-key',
                roles: ['ADMIN'],
                permissions: undefined,
                overwrite: true,
            });
            expect(logService.log).toHaveBeenCalledWith('overwritten-key');
        });

        it('should prompt for missing fields when creating without sufficient info', async () => {
            vi.mocked(apiKeyService.findByField).mockReturnValue(null);
            vi.mocked(inquirerService.prompt).mockResolvedValue({
                name: 'prompted-key',
                roles: ['USER'],
                permissions: [],
                description: 'Prompted description',
                overwrite: false,
            } as any);
            vi.mocked(apiKeyService.create).mockResolvedValue({ key: 'prompted-api-key' } as any);

            await command.run([], { name: '', create: true });

            // The question set is invoked with the original (insufficient) options.
            expect(inquirerService.prompt).toHaveBeenCalledWith('add-api-key', {
                name: '',
                create: true,
            });
            // The created key reflects the prompted answers, not the CLI options.
            expect(apiKeyService.create).toHaveBeenCalledWith({
                name: 'prompted-key',
                description: 'Prompted description',
                roles: ['USER'],
                permissions: [],
                overwrite: false,
            });
        });
    });
});
|
||||
@@ -1,9 +1,10 @@
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import type { CanonicalInternalClientService } from '@unraid/shared';
|
||||
import { CANONICAL_INTERNAL_CLIENT_TOKEN } from '@unraid/shared';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ApiReportService } from '@app/unraid-api/cli/api-report.service.js';
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import {
|
||||
CONNECT_STATUS_QUERY,
|
||||
@@ -40,7 +41,7 @@ describe('ApiReportService', () => {
|
||||
providers: [
|
||||
ApiReportService,
|
||||
{ provide: LogService, useValue: mockLogService },
|
||||
{ provide: CliInternalClientService, useValue: mockInternalClientService },
|
||||
{ provide: CANONICAL_INTERNAL_CLIENT_TOKEN, useValue: mockInternalClientService },
|
||||
],
|
||||
}).compile();
|
||||
|
||||
@@ -64,9 +65,13 @@ describe('ApiReportService', () => {
|
||||
uuid: 'test-uuid',
|
||||
},
|
||||
versions: {
|
||||
unraid: '6.12.0',
|
||||
kernel: '5.19.17',
|
||||
openssl: '3.0.8',
|
||||
core: {
|
||||
unraid: '6.12.0',
|
||||
kernel: '5.19.17',
|
||||
},
|
||||
packages: {
|
||||
openssl: '3.0.8',
|
||||
},
|
||||
},
|
||||
},
|
||||
config: {
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
import { access, readFile, unlink, writeFile } from 'fs/promises';
|
||||
|
||||
import type { CanonicalInternalClientService } from '@unraid/shared';
|
||||
import { CANONICAL_INTERNAL_CLIENT_TOKEN } from '@unraid/shared';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { DeveloperToolsService } from '@app/unraid-api/cli/developer/developer-tools.service.js';
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { RestartCommand } from '@app/unraid-api/cli/restart.command.js';
|
||||
|
||||
@@ -15,7 +16,7 @@ describe('DeveloperToolsService', () => {
|
||||
let service: DeveloperToolsService;
|
||||
let logService: LogService;
|
||||
let restartCommand: RestartCommand;
|
||||
let internalClient: CliInternalClientService;
|
||||
let internalClient: CanonicalInternalClientService;
|
||||
|
||||
const mockClient = {
|
||||
mutate: vi.fn(),
|
||||
@@ -42,7 +43,7 @@ describe('DeveloperToolsService', () => {
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: CliInternalClientService,
|
||||
provide: CANONICAL_INTERNAL_CLIENT_TOKEN,
|
||||
useValue: {
|
||||
getClient: vi.fn().mockResolvedValue(mockClient),
|
||||
},
|
||||
@@ -53,7 +54,7 @@ describe('DeveloperToolsService', () => {
|
||||
service = module.get<DeveloperToolsService>(DeveloperToolsService);
|
||||
logService = module.get<LogService>(LogService);
|
||||
restartCommand = module.get<RestartCommand>(RestartCommand);
|
||||
internalClient = module.get<CliInternalClientService>(CliInternalClientService);
|
||||
internalClient = module.get<CanonicalInternalClientService>(CANONICAL_INTERNAL_CLIENT_TOKEN);
|
||||
});
|
||||
|
||||
describe('setSandboxMode', () => {
|
||||
|
||||
111
api/src/unraid-api/cli/__test__/version.command.test.ts
Normal file
111
api/src/unraid-api/cli/__test__/version.command.test.ts
Normal file
@@ -0,0 +1,111 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import { afterEach, beforeEach, describe, expect, it, MockInstance, vi } from 'vitest';
|
||||
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { VersionCommand } from '@app/unraid-api/cli/version.command.js';
|
||||
|
||||
// Mutable module-level value backing the API_VERSION mock below; individual
// tests reassign it to simulate different build metadata.
let API_VERSION_MOCK = '4.18.2+build123';

// Replace @app/environment.js so API_VERSION is served through a getter.
// The getter indirection lets tests change API_VERSION_MOCK *after* module
// load and still have the command observe the new value.
vi.mock('@app/environment.js', async (importOriginal) => {
    const actual = (await importOriginal()) as any;
    return {
        ...actual,
        get API_VERSION() {
            return API_VERSION_MOCK;
        },
    };
});
||||
|
||||
// Unit tests for `unraid-api version`: verifies the human-readable (logger)
// output path and the --json path, including a version string without the
// `+build` suffix.
describe('VersionCommand', () => {
    let command: VersionCommand;
    let logService: LogService;
    let consoleLogSpy: MockInstance<typeof console.log>;

    beforeEach(async () => {
        API_VERSION_MOCK = '4.18.2+build123'; // Reset to default before each test
        // JSON output goes straight to console.log, so spy on it directly.
        consoleLogSpy = vi.spyOn(console, 'log').mockImplementation(() => {});

        const module: TestingModule = await Test.createTestingModule({
            providers: [
                VersionCommand,
                {
                    provide: LogService,
                    useValue: {
                        info: vi.fn(),
                    },
                },
            ],
        }).compile();

        command = module.get<VersionCommand>(VersionCommand);
        logService = module.get<LogService>(LogService);
    });

    afterEach(() => {
        vi.restoreAllMocks();
    });

    describe('run', () => {
        it('should output version with logger when no options provided', async () => {
            await command.run([]);

            expect(logService.info).toHaveBeenCalledWith('Unraid API v4.18.2+build123');
            expect(consoleLogSpy).not.toHaveBeenCalled();
        });

        it('should output version with logger when json option is false', async () => {
            await command.run([], { json: false });

            expect(logService.info).toHaveBeenCalledWith('Unraid API v4.18.2+build123');
            expect(consoleLogSpy).not.toHaveBeenCalled();
        });

        it('should output JSON when json option is true', async () => {
            await command.run([], { json: true });

            expect(logService.info).not.toHaveBeenCalled();
            expect(consoleLogSpy).toHaveBeenCalledWith(
                JSON.stringify({
                    version: '4.18.2',
                    build: 'build123',
                    combined: '4.18.2+build123',
                })
            );
        });

        it('should handle version without build info', async () => {
            API_VERSION_MOCK = '4.18.2'; // Set version without build info

            // A fresh module is compiled so the command re-reads the mocked
            // API_VERSION via the getter defined above.
            const module: TestingModule = await Test.createTestingModule({
                providers: [
                    VersionCommand,
                    {
                        provide: LogService,
                        useValue: {
                            info: vi.fn(),
                        },
                    },
                ],
            }).compile();

            const commandWithoutBuild = module.get<VersionCommand>(VersionCommand);

            await commandWithoutBuild.run([], { json: true });

            // Note: JSON.stringify drops `build: undefined`, so the expected
            // payload is effectively {"version":"4.18.2","combined":"4.18.2"}.
            expect(consoleLogSpy).toHaveBeenCalledWith(
                JSON.stringify({
                    version: '4.18.2',
                    build: undefined,
                    combined: '4.18.2',
                })
            );
        });
    });

    describe('parseJson', () => {
        it('should return true', () => {
            expect(command.parseJson()).toBe(true);
        });
    });
});
||||
@@ -1,44 +0,0 @@
|
||||
import { Inject, Injectable, Logger, OnModuleInit } from '@nestjs/common';
|
||||
|
||||
import type { ApiKeyService } from '@unraid/shared/services/api-key.js';
|
||||
import { Role } from '@unraid/shared/graphql.model.js';
|
||||
import { API_KEY_SERVICE_TOKEN } from '@unraid/shared/tokens.js';
|
||||
|
||||
/**
|
||||
* Service that creates and manages the admin API key used by CLI commands.
|
||||
* Uses the standard API key storage location via helper methods in ApiKeyService.
|
||||
*/
|
||||
@Injectable()
|
||||
export class AdminKeyService implements OnModuleInit {
|
||||
private readonly logger = new Logger(AdminKeyService.name);
|
||||
private static readonly ADMIN_KEY_NAME = 'CliInternal';
|
||||
private static readonly ADMIN_KEY_DESCRIPTION =
|
||||
'Internal admin API key used by CLI commands for system operations';
|
||||
|
||||
constructor(
|
||||
@Inject(API_KEY_SERVICE_TOKEN)
|
||||
private readonly apiKeyService: ApiKeyService
|
||||
) {}
|
||||
|
||||
async onModuleInit() {
|
||||
try {
|
||||
await this.getOrCreateLocalAdminKey();
|
||||
this.logger.log('Admin API key initialized successfully');
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to initialize admin API key:', error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets or creates a local admin API key for CLI operations.
|
||||
* Uses the standard API key storage location.
|
||||
*/
|
||||
public async getOrCreateLocalAdminKey(): Promise<string> {
|
||||
return this.apiKeyService.ensureKey({
|
||||
name: AdminKeyService.ADMIN_KEY_NAME,
|
||||
description: AdminKeyService.ADMIN_KEY_DESCRIPTION,
|
||||
roles: [Role.ADMIN], // Full admin privileges for CLI operations
|
||||
legacyNames: ['CLI', 'Internal', 'CliAdmin'], // Clean up old keys
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,9 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { Inject, Injectable } from '@nestjs/common';
|
||||
|
||||
import type { CanonicalInternalClientService } from '@unraid/shared';
|
||||
import { CANONICAL_INTERNAL_CLIENT_TOKEN } from '@unraid/shared';
|
||||
|
||||
import type { ConnectStatusQuery, SystemReportQuery } from '@app/unraid-api/cli/generated/graphql.js';
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import {
|
||||
CONNECT_STATUS_QUERY,
|
||||
@@ -60,7 +62,8 @@ export interface ApiReportData {
|
||||
@Injectable()
|
||||
export class ApiReportService {
|
||||
constructor(
|
||||
private readonly internalClient: CliInternalClientService,
|
||||
@Inject(CANONICAL_INTERNAL_CLIENT_TOKEN)
|
||||
private readonly internalClient: CanonicalInternalClientService,
|
||||
private readonly logger: LogService
|
||||
) {}
|
||||
|
||||
@@ -82,7 +85,7 @@ export class ApiReportService {
|
||||
? {
|
||||
id: systemData.info.system.uuid,
|
||||
name: systemData.server?.name || 'Unknown',
|
||||
version: systemData.info.versions.unraid || 'Unknown',
|
||||
version: systemData.info.versions.core.unraid || 'Unknown',
|
||||
machineId: 'REDACTED',
|
||||
manufacturer: systemData.info.system.manufacturer,
|
||||
model: systemData.info.system.model,
|
||||
@@ -135,7 +138,7 @@ export class ApiReportService {
|
||||
});
|
||||
}
|
||||
|
||||
const client = await this.internalClient.getClient();
|
||||
const client = await this.internalClient.getClient({ enableSubscriptions: false });
|
||||
|
||||
// Query system data
|
||||
let systemResult: { data: SystemReportQuery } | null = null;
|
||||
@@ -190,7 +193,7 @@ export class ApiReportService {
|
||||
|
||||
return this.createApiReportData({
|
||||
apiRunning,
|
||||
systemData: systemResult.data,
|
||||
systemData: systemResult?.data,
|
||||
connectData,
|
||||
servicesData,
|
||||
});
|
||||
|
||||
@@ -39,6 +39,12 @@ export class AddApiKeyQuestionSet {
|
||||
return this.apiKeyService.convertRolesStringArrayToRoles(val);
|
||||
}
|
||||
|
||||
@WhenFor({ name: 'roles' })
|
||||
shouldAskRoles(options: { roles?: Role[]; permissions?: Permission[] }): boolean {
|
||||
// Ask for roles if they weren't provided or are empty
|
||||
return !options.roles || options.roles.length === 0;
|
||||
}
|
||||
|
||||
@ChoicesFor({ name: 'roles' })
|
||||
async getRoles() {
|
||||
return Object.values(Role);
|
||||
@@ -53,6 +59,12 @@ export class AddApiKeyQuestionSet {
|
||||
return this.apiKeyService.convertPermissionsStringArrayToPermissions(val);
|
||||
}
|
||||
|
||||
@WhenFor({ name: 'permissions' })
|
||||
shouldAskPermissions(options: { roles?: Role[]; permissions?: Permission[] }): boolean {
|
||||
// Ask for permissions if they weren't provided or are empty
|
||||
return !options.permissions || options.permissions.length === 0;
|
||||
}
|
||||
|
||||
@ChoicesFor({ name: 'permissions' })
|
||||
async getPermissions() {
|
||||
return this.apiKeyService
|
||||
@@ -72,6 +84,6 @@ export class AddApiKeyQuestionSet {
|
||||
|
||||
@WhenFor({ name: 'overwrite' })
|
||||
shouldAskOverwrite(options: { name: string }): boolean {
|
||||
return Boolean(this.apiKeyService.findByKey(options.name));
|
||||
return Boolean(this.apiKeyService.findByField('name', options.name));
|
||||
}
|
||||
}
|
||||
|
||||
434
api/src/unraid-api/cli/apikey/api-key.command.spec.ts
Normal file
434
api/src/unraid-api/cli/apikey/api-key.command.spec.ts
Normal file
@@ -0,0 +1,434 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { InquirerService } from 'nest-commander';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
|
||||
import { ApiKeyCommand } from '@app/unraid-api/cli/apikey/api-key.command.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
|
||||
describe('ApiKeyCommand', () => {
|
||||
let command: ApiKeyCommand;
|
||||
let apiKeyService: ApiKeyService;
|
||||
let logService: LogService;
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
ApiKeyCommand,
|
||||
{
|
||||
provide: ApiKeyService,
|
||||
useValue: {
|
||||
findByField: vi.fn(),
|
||||
create: vi.fn(),
|
||||
convertRolesStringArrayToRoles: vi.fn(),
|
||||
convertPermissionsStringArrayToPermissions: vi.fn(),
|
||||
findAll: vi.fn(),
|
||||
deleteApiKeys: vi.fn(),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: LogService,
|
||||
useValue: {
|
||||
log: vi.fn(),
|
||||
error: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: InquirerService,
|
||||
useValue: {
|
||||
prompt: vi.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
command = module.get<ApiKeyCommand>(ApiKeyCommand);
|
||||
apiKeyService = module.get<ApiKeyService>(ApiKeyService);
|
||||
logService = module.get<LogService>(LogService);
|
||||
});
|
||||
|
||||
describe('parseRoles', () => {
|
||||
it('should parse valid roles correctly', () => {
|
||||
const mockConvert = vi
|
||||
.spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
|
||||
.mockReturnValue([Role.ADMIN, Role.CONNECT]);
|
||||
|
||||
const result = command.parseRoles('ADMIN,CONNECT');
|
||||
|
||||
expect(mockConvert).toHaveBeenCalledWith(['ADMIN', 'CONNECT']);
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT]);
|
||||
});
|
||||
|
||||
it('should return GUEST role when no roles provided', () => {
|
||||
const result = command.parseRoles('');
|
||||
|
||||
expect(result).toEqual([Role.GUEST]);
|
||||
});
|
||||
|
||||
it('should handle roles with spaces', () => {
|
||||
const mockConvert = vi
|
||||
.spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
|
||||
.mockReturnValue([Role.ADMIN, Role.VIEWER]);
|
||||
|
||||
const result = command.parseRoles('ADMIN, VIEWER');
|
||||
|
||||
expect(mockConvert).toHaveBeenCalledWith(['ADMIN', ' VIEWER']);
|
||||
expect(result).toEqual([Role.ADMIN, Role.VIEWER]);
|
||||
});
|
||||
|
||||
it('should throw error when no valid roles found', () => {
|
||||
vi.spyOn(apiKeyService, 'convertRolesStringArrayToRoles').mockReturnValue([]);
|
||||
|
||||
expect(() => command.parseRoles('INVALID_ROLE')).toThrow(
|
||||
`Invalid roles. Valid options are: ${Object.values(Role).join(', ')}`
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle mixed valid and invalid roles with warning', () => {
|
||||
const mockConvert = vi
|
||||
.spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
|
||||
.mockImplementation((roles) => {
|
||||
const validRoles: Role[] = [];
|
||||
const invalidRoles: string[] = [];
|
||||
|
||||
for (const roleStr of roles) {
|
||||
const upperRole = roleStr.trim().toUpperCase();
|
||||
const role = Role[upperRole as keyof typeof Role];
|
||||
|
||||
if (role) {
|
||||
validRoles.push(role);
|
||||
} else {
|
||||
invalidRoles.push(roleStr);
|
||||
}
|
||||
}
|
||||
|
||||
if (invalidRoles.length > 0) {
|
||||
logService.warn(`Ignoring invalid roles: ${invalidRoles.join(', ')}`);
|
||||
}
|
||||
|
||||
return validRoles;
|
||||
});
|
||||
|
||||
const result = command.parseRoles('ADMIN,INVALID,VIEWER');
|
||||
|
||||
expect(mockConvert).toHaveBeenCalledWith(['ADMIN', 'INVALID', 'VIEWER']);
|
||||
expect(logService.warn).toHaveBeenCalledWith('Ignoring invalid roles: INVALID');
|
||||
expect(result).toEqual([Role.ADMIN, Role.VIEWER]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('run', () => {
|
||||
it('should create API key with roles without prompting', async () => {
|
||||
const mockKey = {
|
||||
id: 'test-id',
|
||||
key: 'test-key-123',
|
||||
name: 'TEST',
|
||||
roles: [Role.ADMIN],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
|
||||
vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'TEST',
|
||||
create: true,
|
||||
roles: [Role.ADMIN],
|
||||
permissions: undefined,
|
||||
description: 'Test description',
|
||||
});
|
||||
|
||||
expect(apiKeyService.create).toHaveBeenCalledWith({
|
||||
name: 'TEST',
|
||||
description: 'Test description',
|
||||
roles: [Role.ADMIN],
|
||||
permissions: undefined,
|
||||
overwrite: false,
|
||||
});
|
||||
expect(logService.log).toHaveBeenCalledWith('test-key-123');
|
||||
});
|
||||
|
||||
it('should create API key with permissions only without prompting', async () => {
|
||||
const mockKey = {
|
||||
id: 'test-id',
|
||||
key: 'test-key-456',
|
||||
name: 'TEST_PERMS',
|
||||
roles: [],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
const mockPermissions = [
|
||||
{
|
||||
resource: Resource.DOCKER,
|
||||
actions: [AuthAction.READ_ANY],
|
||||
},
|
||||
];
|
||||
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
|
||||
vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'TEST_PERMS',
|
||||
create: true,
|
||||
roles: undefined,
|
||||
permissions: mockPermissions,
|
||||
description: 'Test with permissions',
|
||||
});
|
||||
|
||||
expect(apiKeyService.create).toHaveBeenCalledWith({
|
||||
name: 'TEST_PERMS',
|
||||
description: 'Test with permissions',
|
||||
roles: undefined,
|
||||
permissions: mockPermissions,
|
||||
overwrite: false,
|
||||
});
|
||||
expect(logService.log).toHaveBeenCalledWith('test-key-456');
|
||||
});
|
||||
|
||||
it('should use default description when not provided', async () => {
|
||||
const mockKey = {
|
||||
id: 'test-id',
|
||||
key: 'test-key-789',
|
||||
name: 'NO_DESC',
|
||||
roles: [Role.VIEWER],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
|
||||
vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'NO_DESC',
|
||||
create: true,
|
||||
roles: [Role.VIEWER],
|
||||
permissions: undefined,
|
||||
});
|
||||
|
||||
expect(apiKeyService.create).toHaveBeenCalledWith({
|
||||
name: 'NO_DESC',
|
||||
description: 'CLI generated key: NO_DESC',
|
||||
roles: [Role.VIEWER],
|
||||
permissions: undefined,
|
||||
overwrite: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('should return existing key when found', async () => {
|
||||
const existingKey = {
|
||||
id: 'existing-id',
|
||||
key: 'existing-key-123',
|
||||
name: 'EXISTING',
|
||||
roles: [Role.ADMIN],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(existingKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'EXISTING',
|
||||
create: false,
|
||||
});
|
||||
|
||||
expect(apiKeyService.findByField).toHaveBeenCalledWith('name', 'EXISTING');
|
||||
expect(logService.log).toHaveBeenCalledWith('existing-key-123');
|
||||
expect(apiKeyService.create).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle uppercase role conversion', () => {
|
||||
const mockConvert = vi
|
||||
.spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
|
||||
.mockImplementation((roles) => {
|
||||
return roles
|
||||
.map((roleStr) => Role[roleStr.trim().toUpperCase() as keyof typeof Role])
|
||||
.filter(Boolean);
|
||||
});
|
||||
|
||||
const result = command.parseRoles('admin,connect');
|
||||
|
||||
expect(mockConvert).toHaveBeenCalledWith(['admin', 'connect']);
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT]);
|
||||
});
|
||||
|
||||
it('should handle lowercase role conversion', () => {
|
||||
const mockConvert = vi
|
||||
.spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
|
||||
.mockImplementation((roles) => {
|
||||
return roles
|
||||
.map((roleStr) => Role[roleStr.trim().toUpperCase() as keyof typeof Role])
|
||||
.filter(Boolean);
|
||||
});
|
||||
|
||||
const result = command.parseRoles('viewer');
|
||||
|
||||
expect(mockConvert).toHaveBeenCalledWith(['viewer']);
|
||||
expect(result).toEqual([Role.VIEWER]);
|
||||
});
|
||||
|
||||
it('should handle mixed case role conversion', () => {
|
||||
const mockConvert = vi
|
||||
.spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
|
||||
.mockImplementation((roles) => {
|
||||
return roles
|
||||
.map((roleStr) => Role[roleStr.trim().toUpperCase() as keyof typeof Role])
|
||||
.filter(Boolean);
|
||||
});
|
||||
|
||||
const result = command.parseRoles('Admin,CoNnEcT');
|
||||
|
||||
expect(mockConvert).toHaveBeenCalledWith(['Admin', 'CoNnEcT']);
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('JSON output functionality', () => {
|
||||
let consoleSpy: ReturnType<typeof vi.spyOn>;
|
||||
|
||||
beforeEach(() => {
|
||||
consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
|
||||
});
|
||||
|
||||
it('should output JSON when creating key with --json flag', async () => {
|
||||
const mockKey = {
|
||||
id: 'test-id-123',
|
||||
key: 'test-key-456',
|
||||
name: 'JSON_TEST',
|
||||
roles: [Role.ADMIN],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
|
||||
vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'JSON_TEST',
|
||||
create: true,
|
||||
roles: [Role.ADMIN],
|
||||
json: true,
|
||||
});
|
||||
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
JSON.stringify({ key: 'test-key-456', name: 'JSON_TEST', id: 'test-id-123' })
|
||||
);
|
||||
expect(logService.log).not.toHaveBeenCalledWith('test-key-456');
|
||||
});
|
||||
|
||||
it('should output JSON when fetching existing key with --json flag', async () => {
|
||||
const existingKey = {
|
||||
id: 'existing-id-456',
|
||||
key: 'existing-key-789',
|
||||
name: 'EXISTING_JSON',
|
||||
roles: [Role.VIEWER],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(existingKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'EXISTING_JSON',
|
||||
create: false,
|
||||
json: true,
|
||||
});
|
||||
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
JSON.stringify({ key: 'existing-key-789', name: 'EXISTING_JSON', id: 'existing-id-456' })
|
||||
);
|
||||
expect(logService.log).not.toHaveBeenCalledWith('existing-key-789');
|
||||
});
|
||||
|
||||
it('should output JSON when deleting key with --json flag', async () => {
|
||||
const existingKeys = [
|
||||
{
|
||||
id: 'delete-id-123',
|
||||
name: 'DELETE_JSON',
|
||||
key: 'delete-key-456',
|
||||
roles: [Role.GUEST],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
},
|
||||
];
|
||||
vi.spyOn(apiKeyService, 'findAll').mockResolvedValue(existingKeys);
|
||||
vi.spyOn(apiKeyService, 'deleteApiKeys').mockResolvedValue();
|
||||
|
||||
await command.run([], {
|
||||
name: 'DELETE_JSON',
|
||||
delete: true,
|
||||
json: true,
|
||||
});
|
||||
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
JSON.stringify({
|
||||
deleted: 1,
|
||||
keys: [{ id: 'delete-id-123', name: 'DELETE_JSON' }],
|
||||
})
|
||||
);
|
||||
expect(logService.log).not.toHaveBeenCalledWith('Successfully deleted 1 API key');
|
||||
});
|
||||
|
||||
it('should output JSON error when deleting non-existent key with --json flag', async () => {
|
||||
vi.spyOn(apiKeyService, 'findAll').mockResolvedValue([]);
|
||||
|
||||
await command.run([], {
|
||||
name: 'NONEXISTENT',
|
||||
delete: true,
|
||||
json: true,
|
||||
});
|
||||
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
JSON.stringify({ deleted: 0, message: 'No API keys found to delete' })
|
||||
);
|
||||
expect(logService.log).not.toHaveBeenCalledWith('No API keys found to delete');
|
||||
});
|
||||
|
||||
it('should not suppress creation message when not using JSON', async () => {
|
||||
const mockKey = {
|
||||
id: 'test-id',
|
||||
key: 'test-key',
|
||||
name: 'NO_JSON_TEST',
|
||||
roles: [Role.ADMIN],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
|
||||
vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'NO_JSON_TEST',
|
||||
create: true,
|
||||
roles: [Role.ADMIN],
|
||||
json: false,
|
||||
});
|
||||
|
||||
expect(logService.log).toHaveBeenCalledWith('Creating API Key...');
|
||||
expect(logService.log).toHaveBeenCalledWith('test-key');
|
||||
expect(consoleSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should suppress creation message when using JSON', async () => {
|
||||
const mockKey = {
|
||||
id: 'test-id',
|
||||
key: 'test-key',
|
||||
name: 'JSON_SUPPRESS_TEST',
|
||||
roles: [Role.ADMIN],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
|
||||
vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'JSON_SUPPRESS_TEST',
|
||||
create: true,
|
||||
roles: [Role.ADMIN],
|
||||
json: true,
|
||||
});
|
||||
|
||||
expect(logService.log).not.toHaveBeenCalledWith('Creating API Key...');
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
JSON.stringify({ key: 'test-key', name: 'JSON_SUPPRESS_TEST', id: 'test-id' })
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,5 +1,4 @@
|
||||
import { Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { AuthActionVerb } from 'nest-authz';
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { Command, CommandRunner, InquirerService, Option } from 'nest-commander';
|
||||
|
||||
import type { DeleteApiKeyAnswers } from '@app/unraid-api/cli/apikey/delete-api-key.questions.js';
|
||||
@@ -11,11 +10,13 @@ import { Permission } from '@app/unraid-api/graph/resolvers/api-key/api-key.mode
|
||||
|
||||
interface KeyOptions {
|
||||
name: string;
|
||||
create: boolean;
|
||||
create?: boolean;
|
||||
delete?: boolean;
|
||||
description?: string;
|
||||
roles?: Role[];
|
||||
permissions?: Permission[];
|
||||
overwrite?: boolean;
|
||||
json?: boolean;
|
||||
}
|
||||
|
||||
@Command({
|
||||
@@ -53,29 +54,22 @@ export class ApiKeyCommand extends CommandRunner {
|
||||
})
|
||||
parseRoles(roles: string): Role[] {
|
||||
if (!roles) return [Role.GUEST];
|
||||
const validRoles: Set<Role> = new Set(Object.values(Role));
|
||||
|
||||
const requestedRoles = roles.split(',').map((role) => role.trim().toLocaleLowerCase() as Role);
|
||||
const validRequestedRoles = requestedRoles.filter((role) => validRoles.has(role));
|
||||
const roleArray = roles.split(',').filter(Boolean);
|
||||
const validRoles = this.apiKeyService.convertRolesStringArrayToRoles(roleArray);
|
||||
|
||||
if (validRequestedRoles.length === 0) {
|
||||
throw new Error(`Invalid roles. Valid options are: ${Array.from(validRoles).join(', ')}`);
|
||||
if (validRoles.length === 0) {
|
||||
throw new Error(`Invalid roles. Valid options are: ${Object.values(Role).join(', ')}`);
|
||||
}
|
||||
|
||||
const invalidRoles = requestedRoles.filter((role) => !validRoles.has(role));
|
||||
|
||||
if (invalidRoles.length > 0) {
|
||||
this.logger.warn(`Ignoring invalid roles: ${invalidRoles.join(', ')}`);
|
||||
}
|
||||
|
||||
return validRequestedRoles;
|
||||
return validRoles;
|
||||
}
|
||||
|
||||
@Option({
|
||||
flags: '-p, --permissions <permissions>',
|
||||
description: `Comma separated list of permissions to assign to the key (in the form of "resource:action")
|
||||
RESOURCES: ${Object.values(Resource).join(', ')}
|
||||
ACTIONS: ${Object.values(AuthActionVerb).join(', ')}`,
|
||||
ACTIONS: ${Object.values(AuthAction).join(', ')}`,
|
||||
})
|
||||
parsePermissions(permissions: string): Array<Permission> {
|
||||
return this.apiKeyService.convertPermissionsStringArrayToPermissions(
|
||||
@@ -99,48 +93,137 @@ ACTIONS: ${Object.values(AuthActionVerb).join(', ')}`,
|
||||
return true;
|
||||
}
|
||||
|
||||
/** Prompt the user to select API keys to delete. Then, delete the selected keys. */
|
||||
private async deleteKeys() {
|
||||
@Option({
|
||||
flags: '--overwrite',
|
||||
description: 'Overwrite existing API key if it exists',
|
||||
})
|
||||
parseOverwrite(): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Option({
|
||||
flags: '--json',
|
||||
description: 'Output machine-readable JSON format',
|
||||
})
|
||||
parseJson(): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
/** Helper to output either JSON or regular log message */
|
||||
private output(message: string, jsonData?: object, jsonOutput?: boolean): void {
|
||||
if (jsonOutput && jsonData) {
|
||||
console.log(JSON.stringify(jsonData));
|
||||
} else {
|
||||
this.logger.log(message);
|
||||
}
|
||||
}
|
||||
|
||||
/** Helper to output either JSON or regular error message */
|
||||
private outputError(message: string, jsonData?: object, jsonOutput?: boolean): void {
|
||||
if (jsonOutput && jsonData) {
|
||||
console.log(JSON.stringify(jsonData));
|
||||
} else {
|
||||
this.logger.error(message);
|
||||
}
|
||||
}
|
||||
|
||||
/** Delete API keys either by name (non-interactive) or by prompting user selection (interactive). */
|
||||
private async deleteKeys(name?: string, jsonOutput?: boolean) {
|
||||
const allKeys = await this.apiKeyService.findAll();
|
||||
if (allKeys.length === 0) {
|
||||
this.logger.log('No API keys found to delete');
|
||||
this.output(
|
||||
'No API keys found to delete',
|
||||
{ deleted: 0, message: 'No API keys found to delete' },
|
||||
jsonOutput
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const answers = await this.inquirerService.prompt<DeleteApiKeyAnswers>(
|
||||
DeleteApiKeyQuestionSet.name,
|
||||
{}
|
||||
);
|
||||
if (!answers.selectedKeys || answers.selectedKeys.length === 0) {
|
||||
this.logger.log('No keys selected for deletion');
|
||||
return;
|
||||
let selectedKeyIds: string[];
|
||||
let deletedKeys: { id: string; name: string }[] = [];
|
||||
|
||||
if (name) {
|
||||
// Non-interactive mode: delete by name
|
||||
const keyToDelete = allKeys.find((key) => key.name === name);
|
||||
if (!keyToDelete) {
|
||||
this.outputError(
|
||||
`No API key found with name: ${name}`,
|
||||
{ deleted: 0, error: `No API key found with name: ${name}` },
|
||||
jsonOutput
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
selectedKeyIds = [keyToDelete.id];
|
||||
deletedKeys = [{ id: keyToDelete.id, name: keyToDelete.name }];
|
||||
} else {
|
||||
// Interactive mode: prompt user to select keys
|
||||
const answers = await this.inquirerService.prompt<DeleteApiKeyAnswers>(
|
||||
DeleteApiKeyQuestionSet.name,
|
||||
{}
|
||||
);
|
||||
if (!answers.selectedKeys || answers.selectedKeys.length === 0) {
|
||||
this.output(
|
||||
'No keys selected for deletion',
|
||||
{ deleted: 0, message: 'No keys selected for deletion' },
|
||||
jsonOutput
|
||||
);
|
||||
return;
|
||||
}
|
||||
selectedKeyIds = answers.selectedKeys;
|
||||
deletedKeys = allKeys
|
||||
.filter((key) => selectedKeyIds.includes(key.id))
|
||||
.map((key) => ({ id: key.id, name: key.name }));
|
||||
}
|
||||
|
||||
try {
|
||||
await this.apiKeyService.deleteApiKeys(answers.selectedKeys);
|
||||
this.logger.log(`Successfully deleted ${answers.selectedKeys.length} API keys`);
|
||||
await this.apiKeyService.deleteApiKeys(selectedKeyIds);
|
||||
const message = `Successfully deleted ${selectedKeyIds.length} API key${selectedKeyIds.length === 1 ? '' : 's'}`;
|
||||
this.output(message, { deleted: selectedKeyIds.length, keys: deletedKeys }, jsonOutput);
|
||||
} catch (error) {
|
||||
this.logger.error(error as any);
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
this.outputError(errorMessage, { deleted: 0, error: errorMessage }, jsonOutput);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
async run(
|
||||
_: string[],
|
||||
options: KeyOptions = { create: false, name: '', delete: false }
|
||||
): Promise<void> {
|
||||
async run(_: string[], options: KeyOptions = { name: '', delete: false }): Promise<void> {
|
||||
try {
|
||||
if (options.delete) {
|
||||
await this.deleteKeys();
|
||||
await this.deleteKeys(options.name, options.json);
|
||||
return;
|
||||
}
|
||||
|
||||
const key = this.apiKeyService.findByField('name', options.name);
|
||||
if (key) {
|
||||
this.logger.log(key.key);
|
||||
} else if (options.create) {
|
||||
options = await this.inquirerService.prompt(AddApiKeyQuestionSet.name, options);
|
||||
this.logger.log('Creating API Key...' + JSON.stringify(options));
|
||||
this.output(key.key, { key: key.key, name: key.name, id: key.id }, options.json);
|
||||
} else if (options.create === true) {
|
||||
// Check if we have minimum required info from flags (name + at least one role or permission)
|
||||
const hasMinimumInfo =
|
||||
options.name &&
|
||||
((options.roles && options.roles.length > 0) ||
|
||||
(options.permissions && options.permissions.length > 0));
|
||||
|
||||
if (!hasMinimumInfo) {
|
||||
// Interactive mode - prompt for missing fields
|
||||
options = await this.inquirerService.prompt(AddApiKeyQuestionSet.name, options);
|
||||
} else {
|
||||
// Non-interactive mode - check if key exists and handle overwrite
|
||||
const existingKey = this.apiKeyService.findByField('name', options.name);
|
||||
if (existingKey && !options.overwrite) {
|
||||
this.outputError(
|
||||
`API key with name '${options.name}' already exists. Use --overwrite to replace it.`,
|
||||
{
|
||||
error: `API key with name '${options.name}' already exists. Use --overwrite to replace it.`,
|
||||
},
|
||||
options.json
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
if (!options.json) {
|
||||
this.logger.log('Creating API Key...');
|
||||
}
|
||||
|
||||
if (!options.roles && !options.permissions) {
|
||||
this.logger.error('Please add at least one role or permission to the key.');
|
||||
@@ -155,10 +238,10 @@ ACTIONS: ${Object.values(AuthActionVerb).join(', ')}`,
|
||||
description: options.description || `CLI generated key: ${options.name}`,
|
||||
roles: options.roles,
|
||||
permissions: options.permissions,
|
||||
overwrite: true,
|
||||
overwrite: options.overwrite ?? false,
|
||||
});
|
||||
|
||||
this.logger.log(key.key);
|
||||
this.output(key.key, { key: key.key, name: key.name, id: key.id }, options.json);
|
||||
} else {
|
||||
this.logger.log('No Key Found');
|
||||
process.exit(1);
|
||||
|
||||
@@ -2,9 +2,7 @@ import { Module } from '@nestjs/common';
|
||||
|
||||
import { DependencyService } from '@app/unraid-api/app/dependency.service.js';
|
||||
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
|
||||
import { AdminKeyService } from '@app/unraid-api/cli/admin-key.service.js';
|
||||
import { ApiReportService } from '@app/unraid-api/cli/api-report.service.js';
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';
|
||||
import { ApiConfigModule } from '@app/unraid-api/config/api-config.module.js';
|
||||
@@ -23,15 +21,7 @@ import { UnraidFileModifierModule } from '@app/unraid-api/unraid-file-modifier/u
|
||||
PluginCliModule.register(),
|
||||
UnraidFileModifierModule,
|
||||
],
|
||||
providers: [
|
||||
LogService,
|
||||
PM2Service,
|
||||
ApiKeyService,
|
||||
DependencyService,
|
||||
AdminKeyService,
|
||||
ApiReportService,
|
||||
CliInternalClientService,
|
||||
],
|
||||
exports: [ApiReportService, LogService, ApiKeyService, CliInternalClientService],
|
||||
providers: [LogService, PM2Service, ApiKeyService, DependencyService, ApiReportService],
|
||||
exports: [ApiReportService, LogService, ApiKeyService],
|
||||
})
|
||||
export class CliServicesModule {}
|
||||
|
||||
@@ -1,12 +1,10 @@
|
||||
import { ConfigModule } from '@nestjs/config';
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import { INTERNAL_CLIENT_SERVICE_TOKEN } from '@unraid/shared';
|
||||
import { CANONICAL_INTERNAL_CLIENT_TOKEN, INTERNAL_CLIENT_FACTORY_TOKEN } from '@unraid/shared';
|
||||
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
|
||||
|
||||
import { AdminKeyService } from '@app/unraid-api/cli/admin-key.service.js';
|
||||
import { CliServicesModule } from '@app/unraid-api/cli/cli-services.module.js';
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { InternalGraphQLClientFactory } from '@app/unraid-api/shared/internal-graphql-client.factory.js';
|
||||
|
||||
describe('CliServicesModule', () => {
|
||||
@@ -26,29 +24,23 @@ describe('CliServicesModule', () => {
|
||||
expect(module).toBeDefined();
|
||||
});
|
||||
|
||||
it('should provide CliInternalClientService', () => {
|
||||
const service = module.get(CliInternalClientService);
|
||||
it('should provide CanonicalInternalClient', () => {
|
||||
const service = module.get(CANONICAL_INTERNAL_CLIENT_TOKEN);
|
||||
expect(service).toBeDefined();
|
||||
expect(service).toBeInstanceOf(CliInternalClientService);
|
||||
});
|
||||
|
||||
it('should provide AdminKeyService', () => {
|
||||
const service = module.get(AdminKeyService);
|
||||
expect(service).toBeDefined();
|
||||
expect(service).toBeInstanceOf(AdminKeyService);
|
||||
expect(service.getClient).toBeInstanceOf(Function);
|
||||
});
|
||||
|
||||
it('should provide InternalGraphQLClientFactory via token', () => {
|
||||
const factory = module.get(INTERNAL_CLIENT_SERVICE_TOKEN);
|
||||
const factory = module.get(INTERNAL_CLIENT_FACTORY_TOKEN);
|
||||
expect(factory).toBeDefined();
|
||||
expect(factory).toBeInstanceOf(InternalGraphQLClientFactory);
|
||||
});
|
||||
|
||||
describe('CliInternalClientService dependencies', () => {
|
||||
describe('CanonicalInternalClient dependencies', () => {
|
||||
it('should have all required dependencies available', () => {
|
||||
// This test ensures that CliInternalClientService can be instantiated
|
||||
// This test ensures that CanonicalInternalClient can be instantiated
|
||||
// with all its dependencies properly resolved
|
||||
const service = module.get(CliInternalClientService);
|
||||
const service = module.get(CANONICAL_INTERNAL_CLIENT_TOKEN);
|
||||
expect(service).toBeDefined();
|
||||
|
||||
// Verify the service has its dependencies injected
|
||||
@@ -59,16 +51,9 @@ describe('CliServicesModule', () => {
|
||||
|
||||
it('should resolve InternalGraphQLClientFactory dependency via token', () => {
|
||||
// Explicitly test that the factory is available in the module context via token
|
||||
const factory = module.get(INTERNAL_CLIENT_SERVICE_TOKEN);
|
||||
const factory = module.get(INTERNAL_CLIENT_FACTORY_TOKEN);
|
||||
expect(factory).toBeDefined();
|
||||
expect(factory.createClient).toBeDefined();
|
||||
});
|
||||
|
||||
it('should resolve AdminKeyService dependency', () => {
|
||||
// Explicitly test that AdminKeyService is available in the module context
|
||||
const adminKeyService = module.get(AdminKeyService);
|
||||
expect(adminKeyService).toBeDefined();
|
||||
expect(adminKeyService.getOrCreateLocalAdminKey).toBeDefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -3,7 +3,6 @@ import { ConfigModule } from '@nestjs/config';
|
||||
|
||||
import { DependencyService } from '@app/unraid-api/app/dependency.service.js';
|
||||
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
|
||||
import { AdminKeyService } from '@app/unraid-api/cli/admin-key.service.js';
|
||||
import { ApiReportService } from '@app/unraid-api/cli/api-report.service.js';
|
||||
import { AddApiKeyQuestionSet } from '@app/unraid-api/cli/apikey/add-api-key.questions.js';
|
||||
import { ApiKeyCommand } from '@app/unraid-api/cli/apikey/api-key.command.js';
|
||||
@@ -12,7 +11,6 @@ import { ConfigCommand } from '@app/unraid-api/cli/config.command.js';
|
||||
import { DeveloperToolsService } from '@app/unraid-api/cli/developer/developer-tools.service.js';
|
||||
import { DeveloperCommand } from '@app/unraid-api/cli/developer/developer.command.js';
|
||||
import { DeveloperQuestions } from '@app/unraid-api/cli/developer/developer.questions.js';
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { LogsCommand } from '@app/unraid-api/cli/logs.command.js';
|
||||
import {
|
||||
@@ -69,9 +67,7 @@ const DEFAULT_PROVIDERS = [
|
||||
PM2Service,
|
||||
ApiKeyService,
|
||||
DependencyService,
|
||||
AdminKeyService,
|
||||
ApiReportService,
|
||||
CliInternalClientService,
|
||||
] as const;
|
||||
|
||||
@Module({
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { Inject, Injectable, Logger } from '@nestjs/common';
|
||||
import { access, readFile, unlink, writeFile } from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import type { CanonicalInternalClientService } from '@unraid/shared';
|
||||
import { CANONICAL_INTERNAL_CLIENT_TOKEN } from '@unraid/shared';
|
||||
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { UPDATE_SANDBOX_MUTATION } from '@app/unraid-api/cli/queries/developer.mutation.js';
|
||||
import { RestartCommand } from '@app/unraid-api/cli/restart.command.js';
|
||||
@@ -52,12 +54,13 @@ unraid-dev-modal-test {
|
||||
constructor(
|
||||
private readonly logger: LogService,
|
||||
private readonly restartCommand: RestartCommand,
|
||||
private readonly internalClient: CliInternalClientService
|
||||
@Inject(CANONICAL_INTERNAL_CLIENT_TOKEN)
|
||||
private readonly internalClient: CanonicalInternalClientService
|
||||
) {}
|
||||
|
||||
async setSandboxMode(enable: boolean): Promise<void> {
|
||||
try {
|
||||
const client = await this.internalClient.getClient();
|
||||
const client = await this.internalClient.getClient({ enableSubscriptions: false });
|
||||
|
||||
const result = await client.mutate({
|
||||
mutation: UPDATE_SANDBOX_MUTATION,
|
||||
|
||||
@@ -20,7 +20,7 @@ type Documents = {
|
||||
"\n mutation UpdateSandboxSettings($input: JSON!) {\n updateSettings(input: $input) {\n restartRequired\n values\n }\n }\n": typeof types.UpdateSandboxSettingsDocument,
|
||||
"\n query GetPlugins {\n plugins {\n name\n version\n hasApiModule\n hasCliModule\n }\n }\n": typeof types.GetPluginsDocument,
|
||||
"\n query GetSSOUsers {\n settings {\n api {\n ssoSubIds\n }\n }\n }\n": typeof types.GetSsoUsersDocument,
|
||||
"\n query SystemReport {\n info {\n id\n machineId\n system {\n manufacturer\n model\n version\n sku\n serial\n uuid\n }\n versions {\n unraid\n kernel\n openssl\n }\n }\n config {\n id\n valid\n error\n }\n server {\n id\n name\n }\n }\n": typeof types.SystemReportDocument,
|
||||
"\n query SystemReport {\n info {\n id\n machineId\n system {\n manufacturer\n model\n version\n sku\n serial\n uuid\n }\n versions {\n core {\n unraid\n kernel\n }\n packages {\n openssl\n }\n }\n }\n config {\n id\n valid\n error\n }\n server {\n id\n name\n }\n }\n": typeof types.SystemReportDocument,
|
||||
"\n query ConnectStatus {\n connect {\n id\n dynamicRemoteAccess {\n enabledType\n runningType\n error\n }\n }\n }\n": typeof types.ConnectStatusDocument,
|
||||
"\n query Services {\n services {\n id\n name\n online\n uptime {\n timestamp\n }\n version\n }\n }\n": typeof types.ServicesDocument,
|
||||
"\n query ValidateOidcSession($token: String!) {\n validateOidcSession(token: $token) {\n valid\n username\n }\n }\n": typeof types.ValidateOidcSessionDocument,
|
||||
@@ -32,7 +32,7 @@ const documents: Documents = {
|
||||
"\n mutation UpdateSandboxSettings($input: JSON!) {\n updateSettings(input: $input) {\n restartRequired\n values\n }\n }\n": types.UpdateSandboxSettingsDocument,
|
||||
"\n query GetPlugins {\n plugins {\n name\n version\n hasApiModule\n hasCliModule\n }\n }\n": types.GetPluginsDocument,
|
||||
"\n query GetSSOUsers {\n settings {\n api {\n ssoSubIds\n }\n }\n }\n": types.GetSsoUsersDocument,
|
||||
"\n query SystemReport {\n info {\n id\n machineId\n system {\n manufacturer\n model\n version\n sku\n serial\n uuid\n }\n versions {\n unraid\n kernel\n openssl\n }\n }\n config {\n id\n valid\n error\n }\n server {\n id\n name\n }\n }\n": types.SystemReportDocument,
|
||||
"\n query SystemReport {\n info {\n id\n machineId\n system {\n manufacturer\n model\n version\n sku\n serial\n uuid\n }\n versions {\n core {\n unraid\n kernel\n }\n packages {\n openssl\n }\n }\n }\n config {\n id\n valid\n error\n }\n server {\n id\n name\n }\n }\n": types.SystemReportDocument,
|
||||
"\n query ConnectStatus {\n connect {\n id\n dynamicRemoteAccess {\n enabledType\n runningType\n error\n }\n }\n }\n": types.ConnectStatusDocument,
|
||||
"\n query Services {\n services {\n id\n name\n online\n uptime {\n timestamp\n }\n version\n }\n }\n": types.ServicesDocument,
|
||||
"\n query ValidateOidcSession($token: String!) {\n validateOidcSession(token: $token) {\n valid\n username\n }\n }\n": types.ValidateOidcSessionDocument,
|
||||
@@ -79,7 +79,7 @@ export function gql(source: "\n query GetSSOUsers {\n settings {\n
|
||||
/**
|
||||
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
export function gql(source: "\n query SystemReport {\n info {\n id\n machineId\n system {\n manufacturer\n model\n version\n sku\n serial\n uuid\n }\n versions {\n unraid\n kernel\n openssl\n }\n }\n config {\n id\n valid\n error\n }\n server {\n id\n name\n }\n }\n"): (typeof documents)["\n query SystemReport {\n info {\n id\n machineId\n system {\n manufacturer\n model\n version\n sku\n serial\n uuid\n }\n versions {\n unraid\n kernel\n openssl\n }\n }\n config {\n id\n valid\n error\n }\n server {\n id\n name\n }\n }\n"];
|
||||
export function gql(source: "\n query SystemReport {\n info {\n id\n machineId\n system {\n manufacturer\n model\n version\n sku\n serial\n uuid\n }\n versions {\n core {\n unraid\n kernel\n }\n packages {\n openssl\n }\n }\n }\n config {\n id\n valid\n error\n }\n server {\n id\n name\n }\n }\n"): (typeof documents)["\n query SystemReport {\n info {\n id\n machineId\n system {\n manufacturer\n model\n version\n sku\n serial\n uuid\n }\n versions {\n core {\n unraid\n kernel\n }\n packages {\n openssl\n }\n }\n }\n config {\n id\n valid\n error\n }\n server {\n id\n name\n }\n }\n"];
|
||||
/**
|
||||
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
|
||||
@@ -15,7 +15,7 @@ export type Scalars = {
|
||||
Int: { input: number; output: number; }
|
||||
Float: { input: number; output: number; }
|
||||
/** The `BigInt` scalar type represents non-fractional signed whole numeric values. */
|
||||
BigInt: { input: any; output: any; }
|
||||
BigInt: { input: number; output: number; }
|
||||
/** A date-time string at UTC, such as 2019-12-03T09:54:33Z, compliant with the date-time format. */
|
||||
DateTime: { input: string; output: string; }
|
||||
/** The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
|
||||
@@ -120,7 +120,7 @@ export type ActivationCode = {
|
||||
};
|
||||
|
||||
export type AddPermissionInput = {
|
||||
actions: Array<Scalars['String']['input']>;
|
||||
actions: Array<AuthAction>;
|
||||
resource: Resource;
|
||||
};
|
||||
|
||||
@@ -143,24 +143,36 @@ export type ApiKey = Node & {
|
||||
createdAt: Scalars['String']['output'];
|
||||
description?: Maybe<Scalars['String']['output']>;
|
||||
id: Scalars['PrefixedID']['output'];
|
||||
key: Scalars['String']['output'];
|
||||
name: Scalars['String']['output'];
|
||||
permissions: Array<Permission>;
|
||||
roles: Array<Role>;
|
||||
};
|
||||
|
||||
export type ApiKeyFormSettings = FormSchema & Node & {
|
||||
__typename?: 'ApiKeyFormSettings';
|
||||
/** The data schema for the API key form */
|
||||
dataSchema: Scalars['JSON']['output'];
|
||||
id: Scalars['PrefixedID']['output'];
|
||||
/** The UI schema for the API key form */
|
||||
uiSchema: Scalars['JSON']['output'];
|
||||
/** The current values of the API key form */
|
||||
values: Scalars['JSON']['output'];
|
||||
};
|
||||
|
||||
/** API Key related mutations */
|
||||
export type ApiKeyMutations = {
|
||||
__typename?: 'ApiKeyMutations';
|
||||
/** Add a role to an API key */
|
||||
addRole: Scalars['Boolean']['output'];
|
||||
/** Create an API key */
|
||||
create: ApiKeyWithSecret;
|
||||
create: ApiKey;
|
||||
/** Delete one or more API keys */
|
||||
delete: Scalars['Boolean']['output'];
|
||||
/** Remove a role from an API key */
|
||||
removeRole: Scalars['Boolean']['output'];
|
||||
/** Update an API key */
|
||||
update: ApiKeyWithSecret;
|
||||
update: ApiKey;
|
||||
};
|
||||
|
||||
|
||||
@@ -199,17 +211,6 @@ export type ApiKeyResponse = {
|
||||
valid: Scalars['Boolean']['output'];
|
||||
};
|
||||
|
||||
export type ApiKeyWithSecret = Node & {
|
||||
__typename?: 'ApiKeyWithSecret';
|
||||
createdAt: Scalars['String']['output'];
|
||||
description?: Maybe<Scalars['String']['output']>;
|
||||
id: Scalars['PrefixedID']['output'];
|
||||
key: Scalars['String']['output'];
|
||||
name: Scalars['String']['output'];
|
||||
permissions: Array<Permission>;
|
||||
roles: Array<Role>;
|
||||
};
|
||||
|
||||
export type ArrayCapacity = {
|
||||
__typename?: 'ArrayCapacity';
|
||||
/** Capacity in number of disks */
|
||||
@@ -240,6 +241,8 @@ export type ArrayDisk = Node & {
|
||||
id: Scalars['PrefixedID']['output'];
|
||||
/** Array slot number. Parity1 is always 0 and Parity2 is always 29. Array slots will be 1 - 28. Cache slots are 30 - 53. Flash is 54. */
|
||||
idx: Scalars['Int']['output'];
|
||||
/** Whether the disk is currently spinning */
|
||||
isSpinning?: Maybe<Scalars['Boolean']['output']>;
|
||||
name?: Maybe<Scalars['String']['output']>;
|
||||
/** Number of unrecoverable errors reported by the device I/O drivers. Missing data due to unrecoverable array read errors is filled in on-the-fly using parity reconstruct (and we attempt to write this data back to the sector(s) which failed). Any unrecoverable write error results in disabling the disk. */
|
||||
numErrors?: Maybe<Scalars['BigInt']['output']>;
|
||||
@@ -370,19 +373,24 @@ export enum ArrayStateInputState {
|
||||
STOP = 'STOP'
|
||||
}
|
||||
|
||||
/** Available authentication action verbs */
|
||||
export enum AuthActionVerb {
|
||||
CREATE = 'CREATE',
|
||||
DELETE = 'DELETE',
|
||||
READ = 'READ',
|
||||
UPDATE = 'UPDATE'
|
||||
}
|
||||
|
||||
/** Available authentication possession types */
|
||||
export enum AuthPossession {
|
||||
ANY = 'ANY',
|
||||
OWN = 'OWN',
|
||||
OWN_ANY = 'OWN_ANY'
|
||||
/** Authentication actions with possession (e.g., create:any, read:own) */
|
||||
export enum AuthAction {
|
||||
/** Create any resource */
|
||||
CREATE_ANY = 'CREATE_ANY',
|
||||
/** Create own resource */
|
||||
CREATE_OWN = 'CREATE_OWN',
|
||||
/** Delete any resource */
|
||||
DELETE_ANY = 'DELETE_ANY',
|
||||
/** Delete own resource */
|
||||
DELETE_OWN = 'DELETE_OWN',
|
||||
/** Read any resource */
|
||||
READ_ANY = 'READ_ANY',
|
||||
/** Read own resource */
|
||||
READ_OWN = 'READ_OWN',
|
||||
/** Update any resource */
|
||||
UPDATE_ANY = 'UPDATE_ANY',
|
||||
/** Update own resource */
|
||||
UPDATE_OWN = 'UPDATE_OWN'
|
||||
}
|
||||
|
||||
/** Operators for authorization rule matching */
|
||||
@@ -520,15 +528,29 @@ export enum ContainerState {
|
||||
RUNNING = 'RUNNING'
|
||||
}
|
||||
|
||||
export type CoreVersions = {
|
||||
__typename?: 'CoreVersions';
|
||||
/** Unraid API version */
|
||||
api?: Maybe<Scalars['String']['output']>;
|
||||
/** Kernel version */
|
||||
kernel?: Maybe<Scalars['String']['output']>;
|
||||
/** Unraid version */
|
||||
unraid?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
/** CPU load for a single core */
|
||||
export type CpuLoad = {
|
||||
__typename?: 'CpuLoad';
|
||||
/** The percentage of time the CPU spent running virtual machines (guest). */
|
||||
percentGuest: Scalars['Float']['output'];
|
||||
/** The percentage of time the CPU was idle. */
|
||||
percentIdle: Scalars['Float']['output'];
|
||||
/** The percentage of time the CPU spent servicing hardware interrupts. */
|
||||
percentIrq: Scalars['Float']['output'];
|
||||
/** The percentage of time the CPU spent on low-priority (niced) user space processes. */
|
||||
percentNice: Scalars['Float']['output'];
|
||||
/** The percentage of CPU time stolen by the hypervisor. */
|
||||
percentSteal: Scalars['Float']['output'];
|
||||
/** The percentage of time the CPU spent in kernel space. */
|
||||
percentSystem: Scalars['Float']['output'];
|
||||
/** The total CPU load on a single core, in percent. */
|
||||
@@ -587,6 +609,8 @@ export type Disk = Node & {
|
||||
id: Scalars['PrefixedID']['output'];
|
||||
/** The interface type of the disk */
|
||||
interfaceType: DiskInterfaceType;
|
||||
/** Whether the disk is spinning or not */
|
||||
isSpinning: Scalars['Boolean']['output'];
|
||||
/** The model name of the disk */
|
||||
name: Scalars['String']['output'];
|
||||
/** The partitions on the disk */
|
||||
@@ -654,6 +678,7 @@ export enum DiskSmartStatus {
|
||||
|
||||
export type Docker = Node & {
|
||||
__typename?: 'Docker';
|
||||
containerUpdateStatuses: Array<ExplicitStatusItem>;
|
||||
containers: Array<DockerContainer>;
|
||||
id: Scalars['PrefixedID']['output'];
|
||||
networks: Array<DockerNetwork>;
|
||||
@@ -679,13 +704,15 @@ export type DockerContainer = Node & {
|
||||
id: Scalars['PrefixedID']['output'];
|
||||
image: Scalars['String']['output'];
|
||||
imageId: Scalars['String']['output'];
|
||||
isRebuildReady?: Maybe<Scalars['Boolean']['output']>;
|
||||
isUpdateAvailable?: Maybe<Scalars['Boolean']['output']>;
|
||||
labels?: Maybe<Scalars['JSON']['output']>;
|
||||
mounts?: Maybe<Array<Scalars['JSON']['output']>>;
|
||||
names: Array<Scalars['String']['output']>;
|
||||
networkSettings?: Maybe<Scalars['JSON']['output']>;
|
||||
ports: Array<ContainerPort>;
|
||||
/** Total size of all the files in the container */
|
||||
sizeRootFs?: Maybe<Scalars['Int']['output']>;
|
||||
/** Total size of all files in the container (in bytes) */
|
||||
sizeRootFs?: Maybe<Scalars['BigInt']['output']>;
|
||||
state: ContainerState;
|
||||
status: Scalars['String']['output'];
|
||||
};
|
||||
@@ -750,6 +777,12 @@ export type EnableDynamicRemoteAccessInput = {
|
||||
url: AccessUrlInput;
|
||||
};
|
||||
|
||||
export type ExplicitStatusItem = {
|
||||
__typename?: 'ExplicitStatusItem';
|
||||
name: Scalars['String']['output'];
|
||||
updateStatus: UpdateStatus;
|
||||
};
|
||||
|
||||
export type Flash = Node & {
|
||||
__typename?: 'Flash';
|
||||
guid: Scalars['String']['output'];
|
||||
@@ -766,6 +799,15 @@ export type FlashBackupStatus = {
|
||||
status: Scalars['String']['output'];
|
||||
};
|
||||
|
||||
export type FormSchema = {
|
||||
/** The data schema for the form */
|
||||
dataSchema: Scalars['JSON']['output'];
|
||||
/** The UI schema for the form */
|
||||
uiSchema: Scalars['JSON']['output'];
|
||||
/** The current values of the form */
|
||||
values: Scalars['JSON']['output'];
|
||||
};
|
||||
|
||||
export type Info = Node & {
|
||||
__typename?: 'Info';
|
||||
/** Motherboard information */
|
||||
@@ -1039,67 +1081,11 @@ export type InfoUsb = Node & {
|
||||
|
||||
export type InfoVersions = Node & {
|
||||
__typename?: 'InfoVersions';
|
||||
/** Apache version */
|
||||
apache?: Maybe<Scalars['String']['output']>;
|
||||
/** Docker version */
|
||||
docker?: Maybe<Scalars['String']['output']>;
|
||||
/** gcc version */
|
||||
gcc?: Maybe<Scalars['String']['output']>;
|
||||
/** Git version */
|
||||
git?: Maybe<Scalars['String']['output']>;
|
||||
/** Grunt version */
|
||||
grunt?: Maybe<Scalars['String']['output']>;
|
||||
/** Gulp version */
|
||||
gulp?: Maybe<Scalars['String']['output']>;
|
||||
/** Core system versions */
|
||||
core: CoreVersions;
|
||||
id: Scalars['PrefixedID']['output'];
|
||||
/** Java version */
|
||||
java?: Maybe<Scalars['String']['output']>;
|
||||
/** Kernel version */
|
||||
kernel?: Maybe<Scalars['String']['output']>;
|
||||
/** MongoDB version */
|
||||
mongodb?: Maybe<Scalars['String']['output']>;
|
||||
/** MySQL version */
|
||||
mysql?: Maybe<Scalars['String']['output']>;
|
||||
/** nginx version */
|
||||
nginx?: Maybe<Scalars['String']['output']>;
|
||||
/** Node.js version */
|
||||
node?: Maybe<Scalars['String']['output']>;
|
||||
/** npm version */
|
||||
npm?: Maybe<Scalars['String']['output']>;
|
||||
/** OpenSSL version */
|
||||
openssl?: Maybe<Scalars['String']['output']>;
|
||||
/** Perl version */
|
||||
perl?: Maybe<Scalars['String']['output']>;
|
||||
/** PHP version */
|
||||
php?: Maybe<Scalars['String']['output']>;
|
||||
/** pip version */
|
||||
pip?: Maybe<Scalars['String']['output']>;
|
||||
/** pip3 version */
|
||||
pip3?: Maybe<Scalars['String']['output']>;
|
||||
/** pm2 version */
|
||||
pm2?: Maybe<Scalars['String']['output']>;
|
||||
/** Postfix version */
|
||||
postfix?: Maybe<Scalars['String']['output']>;
|
||||
/** PostgreSQL version */
|
||||
postgresql?: Maybe<Scalars['String']['output']>;
|
||||
/** Python version */
|
||||
python?: Maybe<Scalars['String']['output']>;
|
||||
/** Python3 version */
|
||||
python3?: Maybe<Scalars['String']['output']>;
|
||||
/** Redis version */
|
||||
redis?: Maybe<Scalars['String']['output']>;
|
||||
/** System OpenSSL version */
|
||||
systemOpenssl?: Maybe<Scalars['String']['output']>;
|
||||
/** tsc version */
|
||||
tsc?: Maybe<Scalars['String']['output']>;
|
||||
/** Unraid version */
|
||||
unraid?: Maybe<Scalars['String']['output']>;
|
||||
/** V8 engine version */
|
||||
v8?: Maybe<Scalars['String']['output']>;
|
||||
/** VirtualBox version */
|
||||
virtualbox?: Maybe<Scalars['String']['output']>;
|
||||
/** Yarn version */
|
||||
yarn?: Maybe<Scalars['String']['output']>;
|
||||
/** Software package versions */
|
||||
packages?: Maybe<PackageVersions>;
|
||||
};
|
||||
|
||||
export type InitiateFlashBackupInput = {
|
||||
@@ -1252,6 +1238,7 @@ export type Mutation = {
|
||||
rclone: RCloneMutations;
|
||||
/** Reads each notification to recompute & update the overview. */
|
||||
recalculateOverview: NotificationOverview;
|
||||
refreshDockerDigests: Scalars['Boolean']['output'];
|
||||
/** Remove one or more plugins from the API. Returns false if restart was triggered automatically, true if manual restart is required. */
|
||||
removePlugin: Scalars['Boolean']['output'];
|
||||
setDockerFolderChildren: ResolvedOrganizerV1;
|
||||
@@ -1463,6 +1450,14 @@ export type OidcAuthorizationRule = {
|
||||
value: Array<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type OidcConfiguration = {
|
||||
__typename?: 'OidcConfiguration';
|
||||
/** Default allowed redirect origins that apply to all OIDC providers (e.g., Tailscale domains) */
|
||||
defaultAllowedOrigins?: Maybe<Array<Scalars['String']['output']>>;
|
||||
/** List of configured OIDC providers */
|
||||
providers: Array<OidcProvider>;
|
||||
};
|
||||
|
||||
export type OidcProvider = {
|
||||
__typename?: 'OidcProvider';
|
||||
/** OAuth2 authorization endpoint URL. If omitted, will be auto-discovered from issuer/.well-known/openid-configuration */
|
||||
@@ -1486,7 +1481,7 @@ export type OidcProvider = {
|
||||
/** The unique identifier for the OIDC provider */
|
||||
id: Scalars['PrefixedID']['output'];
|
||||
/** OIDC issuer URL (e.g., https://accounts.google.com). Required for auto-discovery via /.well-known/openid-configuration */
|
||||
issuer: Scalars['String']['output'];
|
||||
issuer?: Maybe<Scalars['String']['output']>;
|
||||
/** JSON Web Key Set URI for token validation. If omitted, will be auto-discovered from issuer/.well-known/openid-configuration */
|
||||
jwksUri?: Maybe<Scalars['String']['output']>;
|
||||
/** Display name of the OIDC provider */
|
||||
@@ -1526,6 +1521,26 @@ export type Owner = {
|
||||
username: Scalars['String']['output'];
|
||||
};
|
||||
|
||||
export type PackageVersions = {
|
||||
__typename?: 'PackageVersions';
|
||||
/** Docker version */
|
||||
docker?: Maybe<Scalars['String']['output']>;
|
||||
/** Git version */
|
||||
git?: Maybe<Scalars['String']['output']>;
|
||||
/** nginx version */
|
||||
nginx?: Maybe<Scalars['String']['output']>;
|
||||
/** Node.js version */
|
||||
node?: Maybe<Scalars['String']['output']>;
|
||||
/** npm version */
|
||||
npm?: Maybe<Scalars['String']['output']>;
|
||||
/** OpenSSL version */
|
||||
openssl?: Maybe<Scalars['String']['output']>;
|
||||
/** PHP version */
|
||||
php?: Maybe<Scalars['String']['output']>;
|
||||
/** pm2 version */
|
||||
pm2?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type ParityCheck = {
|
||||
__typename?: 'ParityCheck';
|
||||
/** Whether corrections are being written to parity */
|
||||
@@ -1545,7 +1560,7 @@ export type ParityCheck = {
|
||||
/** Speed of the parity check, in MB/s */
|
||||
speed?: Maybe<Scalars['String']['output']>;
|
||||
/** Status of the parity check */
|
||||
status?: Maybe<Scalars['String']['output']>;
|
||||
status: ParityCheckStatus;
|
||||
};
|
||||
|
||||
/** Parity check related mutations, WIP, response types and functionaliy will change */
|
||||
@@ -1567,9 +1582,19 @@ export type ParityCheckMutationsStartArgs = {
|
||||
correct: Scalars['Boolean']['input'];
|
||||
};
|
||||
|
||||
export enum ParityCheckStatus {
|
||||
CANCELLED = 'CANCELLED',
|
||||
COMPLETED = 'COMPLETED',
|
||||
FAILED = 'FAILED',
|
||||
NEVER_RUN = 'NEVER_RUN',
|
||||
PAUSED = 'PAUSED',
|
||||
RUNNING = 'RUNNING'
|
||||
}
|
||||
|
||||
export type Permission = {
|
||||
__typename?: 'Permission';
|
||||
actions: Array<Scalars['String']['output']>;
|
||||
/** Actions allowed on this resource */
|
||||
actions: Array<AuthAction>;
|
||||
resource: Resource;
|
||||
};
|
||||
|
||||
@@ -1639,6 +1664,12 @@ export type Query = {
|
||||
disks: Array<Disk>;
|
||||
docker: Docker;
|
||||
flash: Flash;
|
||||
/** Get JSON Schema for API key creation form */
|
||||
getApiKeyCreationFormSchema: ApiKeyFormSettings;
|
||||
/** Get all available authentication actions with possession */
|
||||
getAvailableAuthActions: Array<AuthAction>;
|
||||
/** Get the actual permissions that would be granted by a set of roles */
|
||||
getPermissionsForRoles: Array<Permission>;
|
||||
info: Info;
|
||||
isInitialSetup: Scalars['Boolean']['output'];
|
||||
isSSOEnabled: Scalars['Boolean']['output'];
|
||||
@@ -1649,6 +1680,8 @@ export type Query = {
|
||||
network: Network;
|
||||
/** Get all notifications */
|
||||
notifications: Notifications;
|
||||
/** Get the full OIDC configuration (admin only) */
|
||||
oidcConfiguration: OidcConfiguration;
|
||||
/** Get a specific OIDC provider by ID */
|
||||
oidcProvider?: Maybe<OidcProvider>;
|
||||
/** Get all configured OIDC providers (admin only) */
|
||||
@@ -1658,6 +1691,8 @@ export type Query = {
|
||||
parityHistory: Array<ParityCheck>;
|
||||
/** List all installed plugins with their metadata */
|
||||
plugins: Array<Plugin>;
|
||||
/** Preview the effective permissions for a combination of roles and explicit permissions */
|
||||
previewEffectivePermissions: Array<Permission>;
|
||||
/** Get public OIDC provider information for login buttons */
|
||||
publicOidcProviders: Array<PublicOidcProvider>;
|
||||
publicPartnerInfo?: Maybe<PublicPartnerInfo>;
|
||||
@@ -1691,6 +1726,11 @@ export type QueryDiskArgs = {
|
||||
};
|
||||
|
||||
|
||||
export type QueryGetPermissionsForRolesArgs = {
|
||||
roles: Array<Role>;
|
||||
};
|
||||
|
||||
|
||||
export type QueryLogFileArgs = {
|
||||
lines?: InputMaybe<Scalars['Int']['input']>;
|
||||
path: Scalars['String']['input'];
|
||||
@@ -1703,6 +1743,12 @@ export type QueryOidcProviderArgs = {
|
||||
};
|
||||
|
||||
|
||||
export type QueryPreviewEffectivePermissionsArgs = {
|
||||
permissions?: InputMaybe<Array<AddPermissionInput>>;
|
||||
roles?: InputMaybe<Array<Role>>;
|
||||
};
|
||||
|
||||
|
||||
export type QueryUpsDeviceByIdArgs = {
|
||||
id: Scalars['String']['input'];
|
||||
};
|
||||
@@ -1895,10 +1941,14 @@ export enum Resource {
|
||||
|
||||
/** Available roles for API keys and users */
|
||||
export enum Role {
|
||||
/** Full administrative access to all resources */
|
||||
ADMIN = 'ADMIN',
|
||||
/** Internal Role for Unraid Connect */
|
||||
CONNECT = 'CONNECT',
|
||||
/** Basic read access to user profile only */
|
||||
GUEST = 'GUEST',
|
||||
USER = 'USER'
|
||||
/** Read-only access to all resources */
|
||||
VIEWER = 'VIEWER'
|
||||
}
|
||||
|
||||
export type Server = Node & {
|
||||
@@ -1911,6 +1961,7 @@ export type Server = Node & {
|
||||
name: Scalars['String']['output'];
|
||||
owner: ProfileModel;
|
||||
remoteurl: Scalars['String']['output'];
|
||||
/** Whether this server is online or offline */
|
||||
status: ServerStatus;
|
||||
wanip: Scalars['String']['output'];
|
||||
};
|
||||
@@ -2175,7 +2226,7 @@ export enum UrlType {
|
||||
WIREGUARD = 'WIREGUARD'
|
||||
}
|
||||
|
||||
export type UnifiedSettings = Node & {
|
||||
export type UnifiedSettings = FormSchema & Node & {
|
||||
__typename?: 'UnifiedSettings';
|
||||
/** The data schema for the settings */
|
||||
dataSchema: Scalars['JSON']['output'];
|
||||
@@ -2199,6 +2250,8 @@ export type UnraidArray = Node & {
|
||||
id: Scalars['PrefixedID']['output'];
|
||||
/** Parity disks in the current array */
|
||||
parities: Array<ArrayDisk>;
|
||||
/** Current parity check status */
|
||||
parityCheckStatus: ParityCheck;
|
||||
/** Current array state */
|
||||
state: ArrayState;
|
||||
};
|
||||
@@ -2221,6 +2274,14 @@ export type UpdateSettingsResponse = {
|
||||
warnings?: Maybe<Array<Scalars['String']['output']>>;
|
||||
};
|
||||
|
||||
/** Update status of a container. */
|
||||
export enum UpdateStatus {
|
||||
REBUILD_READY = 'REBUILD_READY',
|
||||
UNKNOWN = 'UNKNOWN',
|
||||
UPDATE_AVAILABLE = 'UPDATE_AVAILABLE',
|
||||
UP_TO_DATE = 'UP_TO_DATE'
|
||||
}
|
||||
|
||||
export type Uptime = {
|
||||
__typename?: 'Uptime';
|
||||
timestamp?: Maybe<Scalars['String']['output']>;
|
||||
@@ -2553,7 +2614,7 @@ export type GetSsoUsersQuery = { __typename?: 'Query', settings: { __typename?:
|
||||
export type SystemReportQueryVariables = Exact<{ [key: string]: never; }>;
|
||||
|
||||
|
||||
export type SystemReportQuery = { __typename?: 'Query', info: { __typename?: 'Info', id: any, machineId?: string | null, system: { __typename?: 'InfoSystem', manufacturer?: string | null, model?: string | null, version?: string | null, sku?: string | null, serial?: string | null, uuid?: string | null }, versions: { __typename?: 'InfoVersions', unraid?: string | null, kernel?: string | null, openssl?: string | null } }, config: { __typename?: 'Config', id: any, valid?: boolean | null, error?: string | null }, server?: { __typename?: 'Server', id: any, name: string } | null };
|
||||
export type SystemReportQuery = { __typename?: 'Query', info: { __typename?: 'Info', id: any, machineId?: string | null, system: { __typename?: 'InfoSystem', manufacturer?: string | null, model?: string | null, version?: string | null, sku?: string | null, serial?: string | null, uuid?: string | null }, versions: { __typename?: 'InfoVersions', core: { __typename?: 'CoreVersions', unraid?: string | null, kernel?: string | null }, packages?: { __typename?: 'PackageVersions', openssl?: string | null } | null } }, config: { __typename?: 'Config', id: any, valid?: boolean | null, error?: string | null }, server?: { __typename?: 'Server', id: any, name: string } | null };
|
||||
|
||||
export type ConnectStatusQueryVariables = Exact<{ [key: string]: never; }>;
|
||||
|
||||
@@ -2579,7 +2640,7 @@ export const UpdateSsoUsersDocument = {"kind":"Document","definitions":[{"kind":
|
||||
export const UpdateSandboxSettingsDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"UpdateSandboxSettings"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"input"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"JSON"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"updateSettings"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"input"},"value":{"kind":"Variable","name":{"kind":"Name","value":"input"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"restartRequired"}},{"kind":"Field","name":{"kind":"Name","value":"values"}}]}}]}}]} as unknown as DocumentNode<UpdateSandboxSettingsMutation, UpdateSandboxSettingsMutationVariables>;
|
||||
export const GetPluginsDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetPlugins"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"plugins"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"version"}},{"kind":"Field","name":{"kind":"Name","value":"hasApiModule"}},{"kind":"Field","name":{"kind":"Name","value":"hasCliModule"}}]}}]}}]} as unknown as DocumentNode<GetPluginsQuery, GetPluginsQueryVariables>;
|
||||
export const GetSsoUsersDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetSSOUsers"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"settings"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"api"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"ssoSubIds"}}]}}]}}]}}]} as unknown as DocumentNode<GetSsoUsersQuery, GetSsoUsersQueryVariables>;
|
||||
export const SystemReportDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"SystemReport"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"info"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"machineId"}},{"kind":"Field","name":{"kind":"Name","value":"system"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"manufacturer"}},{"kind":"Field","name":{"kind":"Name","value":"model"}},{"kind":"Field","name":{"kind":"Name","value":"version"}},{"kind":"Field","name":{"kind":"Name","value":"sku"}},{"kind":"Field","name":{"kind":"Name","value":"serial"}},{"kind":"Field","name":{"kind":"Name","value":"uuid"}}]}},{"kind":"Field","name":{"kind":"Name","value":"versions"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"unraid"}},{"kind":"Field","name":{"kind":"Name","value":"kernel"}},{"kind":"Field","name":{"kind":"Name","value":"openssl"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"config"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"valid"}},{"kind":"Field","name":{"kind":"Name","value":"error"}}]}},{"kind":"Field","name":{"kind":"Name","value":"server"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}}]}}]}}]} as unknown as DocumentNode<SystemReportQuery, SystemReportQueryVariables>;
|
||||
export const SystemReportDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"SystemReport"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"info"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"machineId"}},{"kind":"Field","name":{"kind":"Name","value":"system"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"manufacturer"}},{"kind":"Field","name":{"kind":"Name","value":"model"}},{"kind":"Field","name":{"kind":"Name","value":"version"}},{"kind":"Field","name":{"kind":"Name","value":"sku"}},{"kind":"Field","name":{"kind":"Name","value":"serial"}},{"kind":"Field","name":{"kind":"Name","value":"uuid"}}]}},{"kind":"Field","name":{"kind":"Name","value":"versions"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"core"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"unraid"}},{"kind":"Field","name":{"kind":"Name","value":"kernel"}}]}},{"kind":"Field","name":{"kind":"Name","value":"packages"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"openssl"}}]}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"config"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"valid"}},{"kind":"Field","name":{"kind":"Name","value":"error"}}]}},{"kind":"Field","name":{"kind":"Name","value":"server"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}}]}}]}}]} as unknown as DocumentNode<SystemReportQuery, SystemReportQueryVariables>;
|
||||
export const ConnectStatusDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"ConnectStatus"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"connect"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"dynamicRemoteAccess"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"enabledType"}},{"kind":"Field","name":{"kind":"Name","value":"runningType"}},{"kind":"Field","name":{"kind":"Name","value":"error"}}]}}]}}]}}]} as unknown as DocumentNode<ConnectStatusQuery, ConnectStatusQueryVariables>;
|
||||
export const ServicesDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"Services"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"services"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"online"}},{"kind":"Field","name":{"kind":"Name","value":"uptime"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"timestamp"}}]}},{"kind":"Field","name":{"kind":"Name","value":"version"}}]}}]}}]} as unknown as DocumentNode<ServicesQuery, ServicesQueryVariables>;
|
||||
export const ValidateOidcSessionDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"ValidateOidcSession"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"token"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"String"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"validateOidcSession"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"token"},"value":{"kind":"Variable","name":{"kind":"Name","value":"token"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"valid"}},{"kind":"Field","name":{"kind":"Name","value":"username"}}]}}]}}]} as unknown as DocumentNode<ValidateOidcSessionQuery, ValidateOidcSessionQueryVariables>;
|
||||
@@ -1,203 +0,0 @@
|
||||
import { ConfigModule, ConfigService } from '@nestjs/config';
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import type { InternalGraphQLClientFactory } from '@unraid/shared';
|
||||
import { ApolloClient } from '@apollo/client/core/index.js';
|
||||
import { INTERNAL_CLIENT_SERVICE_TOKEN } from '@unraid/shared';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { AdminKeyService } from '@app/unraid-api/cli/admin-key.service.js';
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
|
||||
describe('CliInternalClientService', () => {
|
||||
let service: CliInternalClientService;
|
||||
let clientFactory: InternalGraphQLClientFactory;
|
||||
let adminKeyService: AdminKeyService;
|
||||
let module: TestingModule;
|
||||
|
||||
const mockApolloClient = {
|
||||
query: vi.fn(),
|
||||
mutate: vi.fn(),
|
||||
stop: vi.fn(),
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
module = await Test.createTestingModule({
|
||||
imports: [ConfigModule.forRoot()],
|
||||
providers: [
|
||||
CliInternalClientService,
|
||||
{
|
||||
provide: INTERNAL_CLIENT_SERVICE_TOKEN,
|
||||
useValue: {
|
||||
createClient: vi.fn().mockResolvedValue(mockApolloClient),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: AdminKeyService,
|
||||
useValue: {
|
||||
getOrCreateLocalAdminKey: vi.fn().mockResolvedValue('test-admin-key'),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<CliInternalClientService>(CliInternalClientService);
|
||||
clientFactory = module.get<InternalGraphQLClientFactory>(INTERNAL_CLIENT_SERVICE_TOKEN);
|
||||
adminKeyService = module.get<AdminKeyService>(AdminKeyService);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await module?.close();
|
||||
});
|
||||
|
||||
it('should be defined', () => {
|
||||
expect(service).toBeDefined();
|
||||
});
|
||||
|
||||
describe('dependency injection', () => {
|
||||
it('should have InternalGraphQLClientFactory injected', () => {
|
||||
expect(clientFactory).toBeDefined();
|
||||
expect(clientFactory.createClient).toBeDefined();
|
||||
});
|
||||
|
||||
it('should have AdminKeyService injected', () => {
|
||||
expect(adminKeyService).toBeDefined();
|
||||
expect(adminKeyService.getOrCreateLocalAdminKey).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getClient', () => {
|
||||
it('should create a client with getApiKey function', async () => {
|
||||
const client = await service.getClient();
|
||||
|
||||
// The API key is now fetched lazily, not immediately
|
||||
expect(clientFactory.createClient).toHaveBeenCalledWith({
|
||||
getApiKey: expect.any(Function),
|
||||
enableSubscriptions: false,
|
||||
});
|
||||
|
||||
// Verify the getApiKey function works correctly when called
|
||||
const callArgs = vi.mocked(clientFactory.createClient).mock.calls[0][0];
|
||||
const apiKey = await callArgs.getApiKey();
|
||||
expect(apiKey).toBe('test-admin-key');
|
||||
expect(adminKeyService.getOrCreateLocalAdminKey).toHaveBeenCalled();
|
||||
|
||||
expect(client).toBe(mockApolloClient);
|
||||
});
|
||||
|
||||
it('should return cached client on subsequent calls', async () => {
|
||||
const client1 = await service.getClient();
|
||||
const client2 = await service.getClient();
|
||||
|
||||
expect(client1).toBe(client2);
|
||||
expect(clientFactory.createClient).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should handle errors when getting admin key', async () => {
|
||||
const error = new Error('Failed to get admin key');
|
||||
vi.mocked(adminKeyService.getOrCreateLocalAdminKey).mockRejectedValueOnce(error);
|
||||
|
||||
// The client creation will succeed, but the API key error happens later
|
||||
const client = await service.getClient();
|
||||
expect(client).toBe(mockApolloClient);
|
||||
|
||||
// Now test that the getApiKey function throws the expected error
|
||||
const callArgs = vi.mocked(clientFactory.createClient).mock.calls[0][0];
|
||||
await expect(callArgs.getApiKey()).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('clearClient', () => {
|
||||
it('should stop and clear the client', async () => {
|
||||
// First create a client
|
||||
await service.getClient();
|
||||
|
||||
// Clear the client
|
||||
service.clearClient();
|
||||
|
||||
expect(mockApolloClient.stop).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle clearing when no client exists', () => {
|
||||
// Should not throw when clearing a non-existent client
|
||||
expect(() => service.clearClient()).not.toThrow();
|
||||
});
|
||||
|
||||
it('should create a new client after clearing', async () => {
|
||||
// Create initial client
|
||||
await service.getClient();
|
||||
|
||||
// Clear it
|
||||
service.clearClient();
|
||||
|
||||
// Create new client
|
||||
await service.getClient();
|
||||
|
||||
// Should have created client twice
|
||||
expect(clientFactory.createClient).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('race condition protection', () => {
|
||||
it('should prevent stale client resurrection when clearClient() is called during creation', async () => {
|
||||
let resolveClientCreation!: (client: any) => void;
|
||||
|
||||
// Mock createClient to return a controllable promise
|
||||
const clientCreationPromise = new Promise<any>((resolve) => {
|
||||
resolveClientCreation = resolve;
|
||||
});
|
||||
vi.mocked(clientFactory.createClient).mockReturnValueOnce(clientCreationPromise);
|
||||
|
||||
// Start client creation (but don't await yet)
|
||||
const getClientPromise = service.getClient();
|
||||
|
||||
// Clear the client while creation is in progress
|
||||
service.clearClient();
|
||||
|
||||
// Now complete the client creation
|
||||
resolveClientCreation(mockApolloClient);
|
||||
|
||||
// Wait for getClient to complete
|
||||
const client = await getClientPromise;
|
||||
|
||||
// The client should be returned from getClient
|
||||
expect(client).toBe(mockApolloClient);
|
||||
|
||||
// But subsequent getClient calls should create a new client
|
||||
// because the race condition protection prevented assignment
|
||||
await service.getClient();
|
||||
|
||||
// Should have created a second client, proving the first wasn't assigned
|
||||
expect(clientFactory.createClient).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('should handle concurrent getClient calls during race condition', async () => {
|
||||
let resolveClientCreation!: (client: any) => void;
|
||||
|
||||
// Mock createClient to return a controllable promise
|
||||
const clientCreationPromise = new Promise<any>((resolve) => {
|
||||
resolveClientCreation = resolve;
|
||||
});
|
||||
vi.mocked(clientFactory.createClient).mockReturnValueOnce(clientCreationPromise);
|
||||
|
||||
// Start multiple concurrent client creation calls
|
||||
const getClientPromise1 = service.getClient();
|
||||
const getClientPromise2 = service.getClient(); // Should wait for first one
|
||||
|
||||
// Clear the client while creation is in progress
|
||||
service.clearClient();
|
||||
|
||||
// Complete the client creation
|
||||
resolveClientCreation(mockApolloClient);
|
||||
|
||||
// Both calls should resolve with the same client
|
||||
const [client1, client2] = await Promise.all([getClientPromise1, getClientPromise2]);
|
||||
expect(client1).toBe(mockApolloClient);
|
||||
expect(client2).toBe(mockApolloClient);
|
||||
|
||||
// But the client should not be cached due to race condition protection
|
||||
await service.getClient();
|
||||
expect(clientFactory.createClient).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,97 +0,0 @@
|
||||
import { Inject, Injectable, Logger } from '@nestjs/common';
|
||||
|
||||
import type { InternalGraphQLClientFactory } from '@unraid/shared';
|
||||
import { ApolloClient, NormalizedCacheObject } from '@apollo/client/core/index.js';
|
||||
import { INTERNAL_CLIENT_SERVICE_TOKEN } from '@unraid/shared';
|
||||
|
||||
import { AdminKeyService } from '@app/unraid-api/cli/admin-key.service.js';
|
||||
|
||||
/**
|
||||
* Internal GraphQL client for CLI commands.
|
||||
*
|
||||
* This service creates an Apollo client that queries the local API server
|
||||
* with admin privileges for CLI operations.
|
||||
*/
|
||||
@Injectable()
|
||||
export class CliInternalClientService {
|
||||
private readonly logger = new Logger(CliInternalClientService.name);
|
||||
private client: ApolloClient<NormalizedCacheObject> | null = null;
|
||||
private creatingClient: Promise<ApolloClient<NormalizedCacheObject>> | null = null;
|
||||
|
||||
constructor(
|
||||
@Inject(INTERNAL_CLIENT_SERVICE_TOKEN)
|
||||
private readonly clientFactory: InternalGraphQLClientFactory,
|
||||
private readonly adminKeyService: AdminKeyService
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Get the admin API key using the AdminKeyService.
|
||||
* This ensures the key exists and is available for CLI operations.
|
||||
*/
|
||||
private async getLocalApiKey(): Promise<string> {
|
||||
try {
|
||||
return await this.adminKeyService.getOrCreateLocalAdminKey();
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to get admin API key:', error);
|
||||
throw new Error(
|
||||
'Unable to get admin API key for internal client. Ensure the API server is running.'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the default CLI client with admin API key.
|
||||
* This is for CLI commands that need admin access.
|
||||
*/
|
||||
public async getClient(): Promise<ApolloClient<NormalizedCacheObject>> {
|
||||
// If client already exists, return it
|
||||
if (this.client) {
|
||||
return this.client;
|
||||
}
|
||||
|
||||
// If another call is already creating the client, wait for it
|
||||
if (this.creatingClient) {
|
||||
return await this.creatingClient;
|
||||
}
|
||||
|
||||
// Start creating the client with race condition protection
|
||||
let creationPromise!: Promise<ApolloClient<NormalizedCacheObject>>;
|
||||
// eslint-disable-next-line prefer-const
|
||||
creationPromise = (async () => {
|
||||
try {
|
||||
const client = await this.clientFactory.createClient({
|
||||
getApiKey: () => this.getLocalApiKey(),
|
||||
enableSubscriptions: false, // CLI doesn't need subscriptions
|
||||
});
|
||||
|
||||
// awaiting *before* checking this.creatingClient is important!
|
||||
// by yielding to the event loop, it ensures
|
||||
// `this.creatingClient = creationPromise;` is executed before the next check.
|
||||
|
||||
// This prevents race conditions where the client is assigned to the wrong instance.
|
||||
// Only assign client if this creation is still current
|
||||
if (this.creatingClient === creationPromise) {
|
||||
this.client = client;
|
||||
this.logger.debug('Created CLI internal GraphQL client with admin privileges');
|
||||
}
|
||||
|
||||
return client;
|
||||
} finally {
|
||||
// Only clear if this creation is still current
|
||||
if (this.creatingClient === creationPromise) {
|
||||
this.creatingClient = null;
|
||||
}
|
||||
}
|
||||
})();
|
||||
|
||||
this.creatingClient = creationPromise;
|
||||
return await creationPromise;
|
||||
}
|
||||
|
||||
public clearClient() {
|
||||
// Stop the Apollo client to terminate any active processes
|
||||
this.client?.stop();
|
||||
this.client = null;
|
||||
this.creatingClient = null;
|
||||
}
|
||||
}
|
||||
76
api/src/unraid-api/cli/pm2.service.spec.ts
Normal file
76
api/src/unraid-api/cli/pm2.service.spec.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import * as fs from 'node:fs/promises';
|
||||
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';
|
||||
|
||||
vi.mock('node:fs/promises');
|
||||
vi.mock('execa');
|
||||
vi.mock('@app/core/utils/files/file-exists.js', () => ({
|
||||
fileExists: vi.fn().mockResolvedValue(false),
|
||||
}));
|
||||
vi.mock('@app/environment.js', () => ({
|
||||
PATHS_LOGS_DIR: '/var/log/unraid-api',
|
||||
PM2_HOME: '/var/log/.pm2',
|
||||
PM2_PATH: '/path/to/pm2',
|
||||
ECOSYSTEM_PATH: '/path/to/ecosystem.config.json',
|
||||
SUPPRESS_LOGS: false,
|
||||
LOG_LEVEL: 'info',
|
||||
}));
|
||||
|
||||
describe('PM2Service', () => {
|
||||
let pm2Service: PM2Service;
|
||||
let logService: LogService;
|
||||
const mockMkdir = vi.mocked(fs.mkdir);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
logService = {
|
||||
trace: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
log: vi.fn(),
|
||||
info: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
} as unknown as LogService;
|
||||
pm2Service = new PM2Service(logService);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('ensurePm2Dependencies', () => {
|
||||
it('should create logs directory and log that PM2 will handle its own directory', async () => {
|
||||
mockMkdir.mockResolvedValue(undefined);
|
||||
|
||||
await pm2Service.ensurePm2Dependencies();
|
||||
|
||||
expect(mockMkdir).toHaveBeenCalledWith('/var/log/unraid-api', { recursive: true });
|
||||
expect(mockMkdir).toHaveBeenCalledTimes(1); // Only logs directory, not PM2_HOME
|
||||
expect(logService.trace).toHaveBeenCalledWith(
|
||||
'PM2_HOME will be created at /var/log/.pm2 when PM2 daemon starts'
|
||||
);
|
||||
});
|
||||
|
||||
it('should log error but not throw when logs directory creation fails', async () => {
|
||||
mockMkdir.mockRejectedValue(new Error('Disk full'));
|
||||
|
||||
await expect(pm2Service.ensurePm2Dependencies()).resolves.not.toThrow();
|
||||
|
||||
expect(logService.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Failed to fully ensure PM2 dependencies: Disk full')
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle mkdir with recursive flag for nested logs path', async () => {
|
||||
mockMkdir.mockResolvedValue(undefined);
|
||||
|
||||
await pm2Service.ensurePm2Dependencies();
|
||||
|
||||
expect(mockMkdir).toHaveBeenCalledWith('/var/log/unraid-api', { recursive: true });
|
||||
expect(mockMkdir).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -42,8 +42,22 @@ export class PM2Service {
|
||||
|
||||
async run(context: CmdContext, ...args: string[]) {
|
||||
const { tag, raw, ...execOptions } = context;
|
||||
execOptions.extendEnv ??= false;
|
||||
// Default to true to match execa's default behavior
|
||||
execOptions.extendEnv ??= true;
|
||||
execOptions.shell ??= 'bash';
|
||||
|
||||
// Ensure /usr/local/bin is in PATH for Node.js
|
||||
const currentPath = execOptions.env?.PATH || process.env.PATH || '/usr/bin:/bin:/usr/sbin:/sbin';
|
||||
const needsPathUpdate = !currentPath.includes('/usr/local/bin');
|
||||
const finalPath = needsPathUpdate ? `/usr/local/bin:${currentPath}` : currentPath;
|
||||
|
||||
// Always ensure PM2_HOME is set in the environment for every PM2 command
|
||||
execOptions.env = {
|
||||
...execOptions.env,
|
||||
PM2_HOME,
|
||||
...(needsPathUpdate && { PATH: finalPath }),
|
||||
};
|
||||
|
||||
const runCommand = () => execa(PM2_PATH, [...args], execOptions satisfies Options);
|
||||
if (raw) {
|
||||
return runCommand();
|
||||
@@ -100,8 +114,20 @@ export class PM2Service {
|
||||
|
||||
/**
|
||||
* Ensures that the dependencies necessary for PM2 to start and operate are present.
|
||||
* Creates PM2_HOME directory with proper permissions if it doesn't exist.
|
||||
*/
|
||||
async ensurePm2Dependencies() {
|
||||
await mkdir(PATHS_LOGS_DIR, { recursive: true });
|
||||
try {
|
||||
// Create logs directory
|
||||
await mkdir(PATHS_LOGS_DIR, { recursive: true });
|
||||
|
||||
// PM2 automatically creates and manages its home directory when the daemon starts
|
||||
this.logger.trace(`PM2_HOME will be created at ${PM2_HOME} when PM2 daemon starts`);
|
||||
} catch (error) {
|
||||
// Log error but don't throw - let PM2 fail with its own error messages if the setup is incomplete
|
||||
this.logger.error(
|
||||
`Failed to fully ensure PM2 dependencies: ${error instanceof Error ? error.message : error}. PM2 may encounter issues during operation.`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,9 +14,13 @@ export const SYSTEM_REPORT_QUERY = gql(`
|
||||
uuid
|
||||
}
|
||||
versions {
|
||||
unraid
|
||||
kernel
|
||||
openssl
|
||||
core {
|
||||
unraid
|
||||
kernel
|
||||
}
|
||||
packages {
|
||||
openssl
|
||||
}
|
||||
}
|
||||
}
|
||||
config {
|
||||
|
||||
@@ -1,9 +1,23 @@
|
||||
import { Command, CommandRunner } from 'nest-commander';
|
||||
import { Command, CommandRunner, Option } from 'nest-commander';
|
||||
|
||||
import { ECOSYSTEM_PATH } from '@app/environment.js';
|
||||
import type { LogLevel } from '@app/core/log.js';
|
||||
import { levels } from '@app/core/log.js';
|
||||
import { ECOSYSTEM_PATH, LOG_LEVEL } from '@app/environment.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';
|
||||
|
||||
export interface LogLevelOptions {
|
||||
logLevel?: LogLevel;
|
||||
}
|
||||
|
||||
export function parseLogLevelOption(val: string, allowedLevels: string[] = [...levels]): LogLevel {
|
||||
const normalized = val.toLowerCase() as LogLevel;
|
||||
if (allowedLevels.includes(normalized)) {
|
||||
return normalized;
|
||||
}
|
||||
throw new Error(`Invalid --log-level "${val}". Allowed: ${allowedLevels.join(', ')}`);
|
||||
}
|
||||
|
||||
@Command({ name: 'restart', description: 'Restart the Unraid API' })
|
||||
export class RestartCommand extends CommandRunner {
|
||||
constructor(
|
||||
@@ -13,14 +27,16 @@ export class RestartCommand extends CommandRunner {
|
||||
super();
|
||||
}
|
||||
|
||||
async run(): Promise<void> {
|
||||
async run(_?: string[], options: LogLevelOptions = {}): Promise<void> {
|
||||
try {
|
||||
this.logger.info('Restarting the Unraid API...');
|
||||
const env = { LOG_LEVEL: options.logLevel };
|
||||
const { stderr, stdout } = await this.pm2.run(
|
||||
{ tag: 'PM2 Restart', raw: true },
|
||||
{ tag: 'PM2 Restart', raw: true, extendEnv: true, env },
|
||||
'restart',
|
||||
ECOSYSTEM_PATH,
|
||||
'--update-env'
|
||||
'--update-env',
|
||||
'--mini-list'
|
||||
);
|
||||
|
||||
if (stderr) {
|
||||
@@ -40,4 +56,13 @@ export class RestartCommand extends CommandRunner {
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
@Option({
|
||||
flags: `--log-level <${levels.join('|')}>`,
|
||||
description: 'log level to use',
|
||||
defaultValue: LOG_LEVEL.toLowerCase(),
|
||||
})
|
||||
parseLogLevel(val: string): LogLevel {
|
||||
return parseLogLevelOption(val);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
import { Inject } from '@nestjs/common';
|
||||
|
||||
import type { CanonicalInternalClientService } from '@unraid/shared';
|
||||
import { CANONICAL_INTERNAL_CLIENT_TOKEN } from '@unraid/shared';
|
||||
import { CommandRunner, SubCommand } from 'nest-commander';
|
||||
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { VALIDATE_OIDC_SESSION_QUERY } from '@app/unraid-api/cli/queries/validate-oidc-session.query.js';
|
||||
|
||||
@@ -13,7 +16,8 @@ import { VALIDATE_OIDC_SESSION_QUERY } from '@app/unraid-api/cli/queries/validat
|
||||
export class ValidateTokenCommand extends CommandRunner {
|
||||
constructor(
|
||||
private readonly logger: LogService,
|
||||
private readonly internalClient: CliInternalClientService
|
||||
@Inject(CANONICAL_INTERNAL_CLIENT_TOKEN)
|
||||
private readonly internalClient: CanonicalInternalClientService
|
||||
) {
|
||||
super();
|
||||
}
|
||||
@@ -45,7 +49,7 @@ export class ValidateTokenCommand extends CommandRunner {
|
||||
|
||||
private async validateOidcToken(token: string): Promise<void> {
|
||||
try {
|
||||
const client = await this.internalClient.getClient();
|
||||
const client = await this.internalClient.getClient({ enableSubscriptions: false });
|
||||
const { data, errors } = await client.query({
|
||||
query: VALIDATE_OIDC_SESSION_QUERY,
|
||||
variables: { token },
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
import { Command, CommandRunner, Option } from 'nest-commander';
|
||||
|
||||
import type { LogLevel } from '@app/core/log.js';
|
||||
import type { LogLevelOptions } from '@app/unraid-api/cli/restart.command.js';
|
||||
import { levels } from '@app/core/log.js';
|
||||
import { ECOSYSTEM_PATH } from '@app/environment.js';
|
||||
import { ECOSYSTEM_PATH, LOG_LEVEL } from '@app/environment.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';
|
||||
|
||||
interface StartCommandOptions {
|
||||
'log-level'?: string;
|
||||
}
|
||||
import { parseLogLevelOption } from '@app/unraid-api/cli/restart.command.js';
|
||||
|
||||
@Command({ name: 'start', description: 'Start the Unraid API' })
|
||||
export class StartCommand extends CommandRunner {
|
||||
@@ -27,20 +25,16 @@ export class StartCommand extends CommandRunner {
|
||||
await this.pm2.run({ tag: 'PM2 Delete' }, 'delete', ECOSYSTEM_PATH);
|
||||
}
|
||||
|
||||
async run(_: string[], options: StartCommandOptions): Promise<void> {
|
||||
async run(_: string[], options: LogLevelOptions): Promise<void> {
|
||||
this.logger.info('Starting the Unraid API');
|
||||
await this.cleanupPM2State();
|
||||
|
||||
const env: Record<string, string> = {};
|
||||
if (options['log-level']) {
|
||||
env.LOG_LEVEL = options['log-level'];
|
||||
}
|
||||
|
||||
const env = { LOG_LEVEL: options.logLevel };
|
||||
const { stderr, stdout } = await this.pm2.run(
|
||||
{ tag: 'PM2 Start', env, raw: true },
|
||||
{ tag: 'PM2 Start', raw: true, extendEnv: true, env },
|
||||
'start',
|
||||
ECOSYSTEM_PATH,
|
||||
'--update-env'
|
||||
'--update-env',
|
||||
'--mini-list'
|
||||
);
|
||||
if (stdout) {
|
||||
this.logger.log(stdout.toString());
|
||||
@@ -54,9 +48,9 @@ export class StartCommand extends CommandRunner {
|
||||
@Option({
|
||||
flags: `--log-level <${levels.join('|')}>`,
|
||||
description: 'log level to use',
|
||||
defaultValue: 'info',
|
||||
defaultValue: LOG_LEVEL.toLowerCase(),
|
||||
})
|
||||
parseLogLevel(val: string): LogLevel {
|
||||
return levels.includes(val as LogLevel) ? (val as LogLevel) : 'info';
|
||||
return parseLogLevelOption(val);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,6 +8,11 @@ export class StatusCommand extends CommandRunner {
|
||||
super();
|
||||
}
|
||||
async run(): Promise<void> {
|
||||
await this.pm2.run({ tag: 'PM2 Status', stdio: 'inherit', raw: true }, 'status', 'unraid-api');
|
||||
await this.pm2.run(
|
||||
{ tag: 'PM2 Status', stdio: 'inherit', raw: true },
|
||||
'status',
|
||||
'unraid-api',
|
||||
'--mini-list'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -33,7 +33,8 @@ export class StopCommand extends CommandRunner {
|
||||
{ tag: 'PM2 Delete', stdio: 'inherit' },
|
||||
'delete',
|
||||
ECOSYSTEM_PATH,
|
||||
'--no-autorestart'
|
||||
'--no-autorestart',
|
||||
'--mini-list'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,37 @@
|
||||
import { Command, CommandRunner } from 'nest-commander';
|
||||
import { Command, CommandRunner, Option } from 'nest-commander';
|
||||
|
||||
import { API_VERSION } from '@app/environment.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
|
||||
@Command({ name: 'version' })
|
||||
interface VersionOptions {
|
||||
json?: boolean;
|
||||
}
|
||||
|
||||
@Command({ name: 'version', description: 'Display API version information' })
|
||||
export class VersionCommand extends CommandRunner {
|
||||
constructor(private readonly logger: LogService) {
|
||||
super();
|
||||
}
|
||||
async run(): Promise<void> {
|
||||
this.logger.info(`Unraid API v${API_VERSION}`);
|
||||
|
||||
@Option({
|
||||
flags: '-j, --json',
|
||||
description: 'Output version information as JSON',
|
||||
})
|
||||
parseJson(): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
async run(passedParam: string[], options?: VersionOptions): Promise<void> {
|
||||
if (options?.json) {
|
||||
const [baseVersion, buildInfo] = API_VERSION.split('+');
|
||||
const versionInfo = {
|
||||
version: baseVersion || API_VERSION,
|
||||
build: buildInfo || undefined,
|
||||
combined: API_VERSION,
|
||||
};
|
||||
console.log(JSON.stringify(versionInfo));
|
||||
} else {
|
||||
this.logger.info(`Unraid API v${API_VERSION}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { ScheduleModule } from '@nestjs/schedule';
|
||||
|
||||
import { JobModule } from '@app/unraid-api/cron/job.module.js';
|
||||
import { LogRotateService } from '@app/unraid-api/cron/log-rotate.service.js';
|
||||
import { WriteFlashFileService } from '@app/unraid-api/cron/write-flash-file.service.js';
|
||||
|
||||
@Module({
|
||||
imports: [],
|
||||
imports: [JobModule],
|
||||
providers: [WriteFlashFileService, LogRotateService],
|
||||
})
|
||||
export class CronModule {}
|
||||
|
||||
13
api/src/unraid-api/cron/job.module.ts
Normal file
13
api/src/unraid-api/cron/job.module.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { ScheduleModule } from '@nestjs/schedule';
|
||||
|
||||
/**
|
||||
* Sets up common dependencies for initializing jobs (e.g. scheduler registry, cron jobs).
|
||||
*
|
||||
* Simplifies testing setup & application dependency tree by ensuring `forRoot` is called only once.
|
||||
*/
|
||||
@Module({
|
||||
imports: [ScheduleModule.forRoot()],
|
||||
exports: [ScheduleModule],
|
||||
})
|
||||
export class JobModule {}
|
||||
172
api/src/unraid-api/decorators/omit-if.decorator.spec.ts
Normal file
172
api/src/unraid-api/decorators/omit-if.decorator.spec.ts
Normal file
@@ -0,0 +1,172 @@
|
||||
import { Reflector } from '@nestjs/core';
|
||||
import { Field, Mutation, ObjectType, Query, ResolveField, Resolver } from '@nestjs/graphql';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { OMIT_IF_METADATA_KEY, OmitIf } from '@app/unraid-api/decorators/omit-if.decorator.js';
|
||||
|
||||
describe('OmitIf Decorator', () => {
|
||||
let reflector: Reflector;
|
||||
|
||||
beforeEach(() => {
|
||||
reflector = new Reflector();
|
||||
});
|
||||
|
||||
describe('OmitIf', () => {
|
||||
it('should set metadata when condition is true', () => {
|
||||
class TestResolver {
|
||||
@OmitIf(true)
|
||||
testMethod() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
|
||||
expect(metadata).toBe(true);
|
||||
});
|
||||
|
||||
it('should not set metadata when condition is false', () => {
|
||||
class TestResolver {
|
||||
@OmitIf(false)
|
||||
testMethod() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
|
||||
expect(metadata).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should evaluate function conditions', () => {
|
||||
const mockCondition = vi.fn(() => true);
|
||||
|
||||
class TestResolver {
|
||||
@OmitIf(mockCondition)
|
||||
testMethod() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
expect(mockCondition).toHaveBeenCalledOnce();
|
||||
const instance = new TestResolver();
|
||||
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
|
||||
expect(metadata).toBe(true);
|
||||
});
|
||||
|
||||
it('should evaluate function conditions that return false', () => {
|
||||
const mockCondition = vi.fn(() => false);
|
||||
|
||||
class TestResolver {
|
||||
@OmitIf(mockCondition)
|
||||
testMethod() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
expect(mockCondition).toHaveBeenCalledOnce();
|
||||
const instance = new TestResolver();
|
||||
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
|
||||
expect(metadata).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should work with environment variables', () => {
|
||||
const originalEnv = process.env.NODE_ENV;
|
||||
process.env.NODE_ENV = 'production';
|
||||
|
||||
class TestResolver {
|
||||
@OmitIf(process.env.NODE_ENV === 'production')
|
||||
testMethod() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
|
||||
expect(metadata).toBe(true);
|
||||
|
||||
process.env.NODE_ENV = originalEnv;
|
||||
});
|
||||
});
|
||||
|
||||
describe('Integration with NestJS GraphQL decorators', () => {
|
||||
it('should work with @Query decorator', () => {
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@OmitIf(true)
|
||||
@Query(() => String)
|
||||
omittedQuery() {
|
||||
return 'test';
|
||||
}
|
||||
|
||||
@OmitIf(false)
|
||||
@Query(() => String)
|
||||
includedQuery() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const omittedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.omittedQuery);
|
||||
const includedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.includedQuery);
|
||||
|
||||
expect(omittedMetadata).toBe(true);
|
||||
expect(includedMetadata).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should work with @Mutation decorator', () => {
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@OmitIf(true)
|
||||
@Mutation(() => String)
|
||||
omittedMutation() {
|
||||
return 'test';
|
||||
}
|
||||
|
||||
@OmitIf(false)
|
||||
@Mutation(() => String)
|
||||
includedMutation() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const omittedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.omittedMutation);
|
||||
const includedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.includedMutation);
|
||||
|
||||
expect(omittedMetadata).toBe(true);
|
||||
expect(includedMetadata).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should work with @ResolveField decorator', () => {
|
||||
@ObjectType()
|
||||
class TestType {
|
||||
@Field()
|
||||
id: string = '';
|
||||
}
|
||||
|
||||
@Resolver(() => TestType)
|
||||
class TestResolver {
|
||||
@OmitIf(true)
|
||||
@ResolveField(() => String)
|
||||
omittedField() {
|
||||
return 'test';
|
||||
}
|
||||
|
||||
@OmitIf(false)
|
||||
@ResolveField(() => String)
|
||||
includedField() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const omittedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.omittedField);
|
||||
const includedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.includedField);
|
||||
|
||||
expect(omittedMetadata).toBe(true);
|
||||
expect(includedMetadata).toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
80
api/src/unraid-api/decorators/omit-if.decorator.ts
Normal file
80
api/src/unraid-api/decorators/omit-if.decorator.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
import { SetMetadata } from '@nestjs/common';
|
||||
import { Extensions } from '@nestjs/graphql';
|
||||
|
||||
import { MapperKind, mapSchema } from '@graphql-tools/utils';
|
||||
import { GraphQLFieldConfig, GraphQLSchema } from 'graphql';
|
||||
|
||||
export const OMIT_IF_METADATA_KEY = 'omitIf';
|
||||
|
||||
/**
|
||||
* Decorator that conditionally omits a GraphQL field/query/mutation based on a condition.
|
||||
* The field will only be omitted from the schema when the condition evaluates to true.
|
||||
*
|
||||
* @param condition - If the condition evaluates to true, the field will be omitted from the schema
|
||||
* @returns A decorator that wraps the target field/query/mutation
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* @OmitIf(process.env.NODE_ENV === 'production')
|
||||
* @Query(() => String)
|
||||
* async debugQuery() {
|
||||
* return 'This query is omitted in production';
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export function OmitIf(condition: boolean | (() => boolean)): MethodDecorator & PropertyDecorator {
|
||||
const shouldOmit = typeof condition === 'function' ? condition() : condition;
|
||||
|
||||
return (target: object, propertyKey?: string | symbol, descriptor?: PropertyDescriptor) => {
|
||||
if (shouldOmit) {
|
||||
SetMetadata(OMIT_IF_METADATA_KEY, true)(
|
||||
target,
|
||||
propertyKey as string,
|
||||
descriptor as PropertyDescriptor
|
||||
);
|
||||
Extensions({ omitIf: true })(
|
||||
target,
|
||||
propertyKey as string,
|
||||
descriptor as PropertyDescriptor
|
||||
);
|
||||
}
|
||||
|
||||
return descriptor;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Schema transformer that omits fields/queries/mutations based on the OmitIf decorator.
|
||||
* @param schema - The GraphQL schema to transform
|
||||
* @returns The transformed GraphQL schema
|
||||
*/
|
||||
export function omitIfSchemaTransformer(schema: GraphQLSchema): GraphQLSchema {
|
||||
return mapSchema(schema, {
|
||||
[MapperKind.OBJECT_FIELD]: (
|
||||
fieldConfig: GraphQLFieldConfig<any, any>,
|
||||
fieldName: string,
|
||||
typeName: string
|
||||
) => {
|
||||
const extensions = fieldConfig.extensions || {};
|
||||
|
||||
if (extensions.omitIf === true) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return fieldConfig;
|
||||
},
|
||||
[MapperKind.ROOT_FIELD]: (
|
||||
fieldConfig: GraphQLFieldConfig<any, any>,
|
||||
fieldName: string,
|
||||
typeName: string
|
||||
) => {
|
||||
const extensions = fieldConfig.extensions || {};
|
||||
|
||||
if (extensions.omitIf === true) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return fieldConfig;
|
||||
},
|
||||
});
|
||||
}
|
||||
317
api/src/unraid-api/decorators/use-feature-flag.decorator.spec.ts
Normal file
317
api/src/unraid-api/decorators/use-feature-flag.decorator.spec.ts
Normal file
@@ -0,0 +1,317 @@
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-nocheck
|
||||
// fixme: types don't sync with mocks, and there's no override to simplify testing.
|
||||
|
||||
import { Reflector } from '@nestjs/core';
|
||||
import { Mutation, Query, ResolveField, Resolver } from '@nestjs/graphql';
|
||||
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { OMIT_IF_METADATA_KEY } from '@app/unraid-api/decorators/omit-if.decorator.js';
|
||||
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
|
||||
|
||||
// Mock the FeatureFlags
|
||||
vi.mock('@app/consts.js', () => ({
|
||||
FeatureFlags: Object.freeze({
|
||||
ENABLE_NEXT_DOCKER_RELEASE: false,
|
||||
ENABLE_EXPERIMENTAL_FEATURE: true,
|
||||
ENABLE_DEBUG_MODE: false,
|
||||
ENABLE_BETA_FEATURES: true,
|
||||
}),
|
||||
}));
|
||||
|
||||
describe('UseFeatureFlag Decorator', () => {
|
||||
let reflector: Reflector;
|
||||
|
||||
beforeEach(() => {
|
||||
reflector = new Reflector();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('Basic functionality', () => {
|
||||
it('should omit field when feature flag is false', () => {
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@Query(() => String)
|
||||
testQuery() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testQuery);
|
||||
expect(metadata).toBe(true); // Should be omitted because flag is false
|
||||
});
|
||||
|
||||
it('should include field when feature flag is true', () => {
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
|
||||
@Query(() => String)
|
||||
testQuery() {
|
||||
return 'test';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testQuery);
|
||||
expect(metadata).toBeUndefined(); // Should not be omitted because flag is true
|
||||
});
|
||||
});
|
||||
|
||||
describe('With different decorator types', () => {
|
||||
it('should work with @Query decorator', () => {
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@UseFeatureFlag('ENABLE_DEBUG_MODE')
|
||||
@Query(() => String)
|
||||
debugQuery() {
|
||||
return 'debug';
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_BETA_FEATURES')
|
||||
@Query(() => String)
|
||||
betaQuery() {
|
||||
return 'beta';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const debugMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.debugQuery);
|
||||
const betaMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.betaQuery);
|
||||
|
||||
expect(debugMetadata).toBe(true); // ENABLE_DEBUG_MODE is false
|
||||
expect(betaMetadata).toBeUndefined(); // ENABLE_BETA_FEATURES is true
|
||||
});
|
||||
|
||||
it('should work with @Mutation decorator', () => {
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@Mutation(() => String)
|
||||
dockerMutation() {
|
||||
return 'docker';
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
|
||||
@Mutation(() => String)
|
||||
experimentalMutation() {
|
||||
return 'experimental';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const dockerMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.dockerMutation);
|
||||
const experimentalMetadata = reflector.get(
|
||||
OMIT_IF_METADATA_KEY,
|
||||
instance.experimentalMutation
|
||||
);
|
||||
|
||||
expect(dockerMetadata).toBe(true); // ENABLE_NEXT_DOCKER_RELEASE is false
|
||||
expect(experimentalMetadata).toBeUndefined(); // ENABLE_EXPERIMENTAL_FEATURE is true
|
||||
});
|
||||
|
||||
it('should work with @ResolveField decorator', () => {
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@UseFeatureFlag('ENABLE_DEBUG_MODE')
|
||||
@ResolveField(() => String)
|
||||
debugField() {
|
||||
return 'debug';
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_BETA_FEATURES')
|
||||
@ResolveField(() => String)
|
||||
betaField() {
|
||||
return 'beta';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const debugMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.debugField);
|
||||
const betaMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.betaField);
|
||||
|
||||
expect(debugMetadata).toBe(true); // ENABLE_DEBUG_MODE is false
|
||||
expect(betaMetadata).toBeUndefined(); // ENABLE_BETA_FEATURES is true
|
||||
});
|
||||
});
|
||||
|
||||
describe('Multiple decorators on same class', () => {
|
||||
it('should handle multiple feature flags independently', () => {
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@Query(() => String)
|
||||
dockerQuery() {
|
||||
return 'docker';
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
|
||||
@Query(() => String)
|
||||
experimentalQuery() {
|
||||
return 'experimental';
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_DEBUG_MODE')
|
||||
@Query(() => String)
|
||||
debugQuery() {
|
||||
return 'debug';
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_BETA_FEATURES')
|
||||
@Query(() => String)
|
||||
betaQuery() {
|
||||
return 'beta';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.dockerQuery)).toBe(true);
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.experimentalQuery)).toBeUndefined();
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.debugQuery)).toBe(true);
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.betaQuery)).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Type safety', () => {
|
||||
it('should only accept valid feature flag keys', () => {
|
||||
// This test verifies TypeScript compile-time type safety
|
||||
// The following would cause a TypeScript error if uncommented:
|
||||
// @UseFeatureFlag('INVALID_FLAG')
|
||||
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@Query(() => String)
|
||||
validQuery() {
|
||||
return 'valid';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
expect(instance.validQuery).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Integration scenarios', () => {
|
||||
it('should work correctly with other decorators', () => {
|
||||
const customDecorator = (
|
||||
target: any,
|
||||
propertyKey: string | symbol,
|
||||
descriptor: PropertyDescriptor
|
||||
) => {
|
||||
Reflect.defineMetadata('custom', true, target, propertyKey);
|
||||
return descriptor;
|
||||
};
|
||||
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@customDecorator
|
||||
@Query(() => String)
|
||||
multiDecoratorQuery() {
|
||||
return 'multi';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new TestResolver();
|
||||
const omitMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.multiDecoratorQuery);
|
||||
const customMetadata = Reflect.getMetadata('custom', instance, 'multiDecoratorQuery');
|
||||
|
||||
expect(omitMetadata).toBe(true);
|
||||
expect(customMetadata).toBe(true);
|
||||
});
|
||||
|
||||
it('should maintain correct decorator order', () => {
|
||||
const orderTracker: string[] = [];
|
||||
|
||||
const trackingDecorator = (name: string) => {
|
||||
return (target: any, propertyKey: string | symbol, descriptor: PropertyDescriptor) => {
|
||||
orderTracker.push(name);
|
||||
return descriptor;
|
||||
};
|
||||
};
|
||||
|
||||
@Resolver()
|
||||
class TestResolver {
|
||||
@trackingDecorator('first')
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@trackingDecorator('last')
|
||||
@Query(() => String)
|
||||
orderedQuery() {
|
||||
return 'ordered';
|
||||
}
|
||||
}
|
||||
|
||||
// Decorators are applied bottom-up
|
||||
expect(orderTracker).toEqual(['last', 'first']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Real-world usage patterns', () => {
|
||||
it('should work with Docker resolver pattern', () => {
|
||||
@Resolver()
|
||||
class DockerResolver {
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@Mutation(() => String)
|
||||
async createDockerFolder(name: string) {
|
||||
return `Created folder: ${name}`;
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@Mutation(() => String)
|
||||
async deleteDockerEntries(entryIds: string[]) {
|
||||
return `Deleted entries: ${entryIds.join(', ')}`;
|
||||
}
|
||||
|
||||
@Query(() => String)
|
||||
async getDockerInfo() {
|
||||
return 'Docker info';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new DockerResolver();
|
||||
|
||||
// Feature flag is false, so these should be omitted
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.createDockerFolder)).toBe(true);
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.deleteDockerEntries)).toBe(true);
|
||||
|
||||
// No feature flag, so this should not be omitted
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.getDockerInfo)).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle mixed feature flags in same resolver', () => {
|
||||
@Resolver()
|
||||
class MixedResolver {
|
||||
@UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
|
||||
@Query(() => String)
|
||||
experimentalQuery() {
|
||||
return 'experimental';
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
|
||||
@Query(() => String)
|
||||
dockerQuery() {
|
||||
return 'docker';
|
||||
}
|
||||
|
||||
@UseFeatureFlag('ENABLE_BETA_FEATURES')
|
||||
@Mutation(() => String)
|
||||
betaMutation() {
|
||||
return 'beta';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = new MixedResolver();
|
||||
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.experimentalQuery)).toBeUndefined();
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.dockerQuery)).toBe(true);
|
||||
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.betaMutation)).toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user