mirror of
https://github.com/unraid/api.git
synced 2026-01-02 14:40:01 -06:00
Compare commits
113 Commits
v4.11.0
...
feat/build
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
81d8d3ef62 | ||
|
|
5d4a16fe8f | ||
|
|
8a3c1b3ba8 | ||
|
|
59b257a50f | ||
|
|
4f8fd18a39 | ||
|
|
95eb841110 | ||
|
|
d06b0db923 | ||
|
|
1c4dc154e8 | ||
|
|
1f67f63513 | ||
|
|
761b3964a9 | ||
|
|
b9a4d4a864 | ||
|
|
c82e3d8427 | ||
|
|
3211312b0e | ||
|
|
fb575acc4f | ||
|
|
4a0b481a2d | ||
|
|
cd15e12cdd | ||
|
|
0e20fd0ab0 | ||
|
|
f44b4a87e9 | ||
|
|
4986f4251d | ||
|
|
8213738e26 | ||
|
|
f32493e728 | ||
|
|
78ce64e357 | ||
|
|
bb8c4a133e | ||
|
|
c3222cc6c4 | ||
|
|
71621072f8 | ||
|
|
d4a8edab49 | ||
|
|
88087d5201 | ||
|
|
5d89682a3f | ||
|
|
bc15bd3d70 | ||
|
|
7c3aee8f3f | ||
|
|
c7c3bb57ea | ||
|
|
99dbad57d5 | ||
|
|
c42f79d406 | ||
|
|
4d8588b173 | ||
|
|
0d1d27064e | ||
|
|
0fe2c2c1c8 | ||
|
|
a8e4119270 | ||
|
|
372a4ebb42 | ||
|
|
4e945f5f56 | ||
|
|
6356f9c41d | ||
|
|
a1ee915ca5 | ||
|
|
c147a6b507 | ||
|
|
9d42b36f74 | ||
|
|
26a95af953 | ||
|
|
0ead267838 | ||
|
|
163763f9e5 | ||
|
|
6469d002b7 | ||
|
|
ab11e7ff7f | ||
|
|
7316dc753f | ||
|
|
1bf74e9d6c | ||
|
|
9cd0d6ac65 | ||
|
|
f0348aa038 | ||
|
|
c1ab3a4746 | ||
|
|
7d67a40433 | ||
|
|
674323fd87 | ||
|
|
6947b5d4af | ||
|
|
c4cc54923c | ||
|
|
c508366702 | ||
|
|
9df6a3f5eb | ||
|
|
aa588883cc | ||
|
|
b2e7801238 | ||
|
|
fd895cacf0 | ||
|
|
6edd3a3d16 | ||
|
|
ac198d5d1a | ||
|
|
f1c043fe5f | ||
|
|
d0c66020e1 | ||
|
|
335f949b53 | ||
|
|
26aeca3624 | ||
|
|
2b4c2a264b | ||
|
|
b7798b82f4 | ||
|
|
426283011a | ||
|
|
effdbcf0f5 | ||
|
|
541b0edd35 | ||
|
|
ce63d5dca2 | ||
|
|
bcaacca061 | ||
|
|
0afc4e8e9a | ||
|
|
1a01696dc7 | ||
|
|
1bc5251310 | ||
|
|
3a10871918 | ||
|
|
58b5544bea | ||
|
|
a4ff3c4092 | ||
|
|
1e0a54d9ef | ||
|
|
096fe98710 | ||
|
|
57217852a3 | ||
|
|
979a267bc5 | ||
|
|
96c120f9b2 | ||
|
|
a2c5d2495f | ||
|
|
b3216874fa | ||
|
|
27dbfde845 | ||
|
|
1a25fedd23 | ||
|
|
ad6aa3b674 | ||
|
|
9c4e764c95 | ||
|
|
20c2d5b445 | ||
|
|
85a441b51d | ||
|
|
c9577e9bf2 | ||
|
|
18b5209087 | ||
|
|
ec8f4f38c8 | ||
|
|
db0e725107 | ||
|
|
5afca5ecba | ||
|
|
beab83b56e | ||
|
|
78997a02c6 | ||
|
|
3534d6fdd7 | ||
|
|
557b03f882 | ||
|
|
514a0ef560 | ||
|
|
dfe352dfa1 | ||
|
|
8005b8c3b6 | ||
|
|
d6fa102d06 | ||
|
|
52f22678e3 | ||
|
|
23ef760d76 | ||
|
|
6ea94f061d | ||
|
|
782d5ebadc | ||
|
|
dfe363bc37 | ||
|
|
6b3b951d82 |
@@ -10,4 +10,5 @@ alwaysApply: false
|
||||
* Test suite is VITEST, do not use jest
|
||||
pnpm --filter ./api test
|
||||
* Prefer to not mock simple dependencies
|
||||
* For error testing, use `.rejects.toThrow()` without arguments - don't test exact error message strings unless the message format is specifically what you're testing
|
||||
|
||||
|
||||
@@ -4,6 +4,10 @@ globs: **/*.test.ts,**/__test__/components/**/*.ts,**/__test__/store/**/*.ts,**/
|
||||
alwaysApply: false
|
||||
---
|
||||
|
||||
## General Testing Best Practices
|
||||
- **Error Testing:** Use `.rejects.toThrow()` without arguments to test that functions throw errors. Don't test exact error message strings unless the message format is specifically what you're testing
|
||||
- **Focus on Behavior:** Test what the code does, not implementation details like exact error message wording
|
||||
|
||||
## Vue Component Testing Best Practices
|
||||
- This is a Nuxt.js app but we are testing with vitest outside of the Nuxt environment
|
||||
- Nuxt is currently set to auto import so some vue files may need compute or ref imported
|
||||
|
||||
10
.github/workflows/build-plugin.yml
vendored
10
.github/workflows/build-plugin.yml
vendored
@@ -45,7 +45,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -88,19 +88,19 @@ jobs:
|
||||
pnpm install --frozen-lockfile --filter @unraid/connect-plugin
|
||||
|
||||
- name: Download Unraid UI Components
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
name: unraid-wc-ui
|
||||
path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/uui
|
||||
merge-multiple: true
|
||||
- name: Download Unraid Web Components
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
pattern: unraid-wc-rich
|
||||
path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/nuxt
|
||||
merge-multiple: true
|
||||
- name: Download Unraid API
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
name: unraid-api
|
||||
path: ${{ github.workspace }}/plugin/api/
|
||||
@@ -152,7 +152,7 @@ jobs:
|
||||
with:
|
||||
workflow: release-production.yml
|
||||
inputs: '{ "version": "${{ steps.vars.outputs.API_VERSION }}" }'
|
||||
token: ${{ secrets.WORKFLOW_TRIGGER_PAT }}
|
||||
token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
- name: Upload to Cloudflare
|
||||
if: inputs.RELEASE_CREATED == 'false'
|
||||
|
||||
89
.github/workflows/claude-code-review.yml
vendored
89
.github/workflows/claude-code-review.yml
vendored
@@ -3,20 +3,30 @@ name: Claude Code Review
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
# Optional: Only run on specific file changes
|
||||
# paths:
|
||||
# - "src/**/*.ts"
|
||||
# - "src/**/*.tsx"
|
||||
# - "src/**/*.js"
|
||||
# - "src/**/*.jsx"
|
||||
# Skip reviews for non-code changes
|
||||
paths-ignore:
|
||||
- "**/*.md"
|
||||
- "**/package-lock.json"
|
||||
- "**/pnpm-lock.yaml"
|
||||
- "**/.gitignore"
|
||||
- "**/LICENSE"
|
||||
- "**/*.config.js"
|
||||
- "**/*.config.ts"
|
||||
- "**/tsconfig.json"
|
||||
- "**/.github/workflows/*.yml"
|
||||
- "**/docs/**"
|
||||
|
||||
jobs:
|
||||
claude-review:
|
||||
# Optional: Filter by PR author
|
||||
# if: |
|
||||
# github.event.pull_request.user.login == 'external-contributor' ||
|
||||
# github.event.pull_request.user.login == 'new-developer' ||
|
||||
# github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR'
|
||||
# Skip review for bot PRs and WIP/skip-review PRs
|
||||
# Only run if changes are significant (>10 lines)
|
||||
if: |
|
||||
(github.event.pull_request.additions > 10 || github.event.pull_request.deletions > 10) &&
|
||||
!contains(github.event.pull_request.title, '[skip-review]') &&
|
||||
!contains(github.event.pull_request.title, '[WIP]') &&
|
||||
!endsWith(github.event.pull_request.user.login, '[bot]') &&
|
||||
github.event.pull_request.user.login != 'dependabot' &&
|
||||
github.event.pull_request.user.login != 'renovate'
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
@@ -27,7 +37,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
@@ -42,31 +52,46 @@ jobs:
|
||||
|
||||
# Direct prompt for automated review (no @claude mention needed)
|
||||
direct_prompt: |
|
||||
Please review this pull request and provide feedback on:
|
||||
- Code quality and best practices
|
||||
- Potential bugs or issues
|
||||
- Performance considerations
|
||||
- Security concerns
|
||||
- Test coverage
|
||||
IMPORTANT: Review ONLY the DIFF/CHANGESET - the actual lines that were added or modified in this PR.
|
||||
DO NOT review the entire file context, only analyze the specific changes being made.
|
||||
|
||||
Be constructive and helpful in your feedback.
|
||||
Look for HIGH-PRIORITY issues in the CHANGED LINES ONLY:
|
||||
|
||||
1. CRITICAL BUGS: Logic errors, null pointer issues, infinite loops, race conditions
|
||||
2. SECURITY: SQL injection, XSS, authentication bypass, exposed secrets, unsafe operations
|
||||
3. BREAKING CHANGES: API contract violations, removed exports, changed function signatures
|
||||
4. DATA LOSS RISKS: Destructive operations without safeguards, missing data validation
|
||||
|
||||
DO NOT comment on:
|
||||
- Code that wasn't changed in this PR
|
||||
- Style, formatting, or documentation
|
||||
- Test coverage (unless tests are broken by the changes)
|
||||
- Minor optimizations or best practices
|
||||
- Existing code issues that weren't introduced by this PR
|
||||
|
||||
If you find no critical issues in the DIFF, respond with: "✅ No critical issues found in changes"
|
||||
|
||||
Keep response under 10 lines. Reference specific line numbers from the diff when reporting issues.
|
||||
|
||||
# Optional: Use sticky comments to make Claude reuse the same comment on subsequent pushes to the same PR
|
||||
# use_sticky_comment: true
|
||||
use_sticky_comment: true
|
||||
|
||||
# Optional: Customize review based on file types
|
||||
# Context-aware review based on PR characteristics
|
||||
# Uncomment to enable different review strategies based on context
|
||||
# direct_prompt: |
|
||||
# Review this PR focusing on:
|
||||
# - For TypeScript files: Type safety and proper interface usage
|
||||
# - For API endpoints: Security, input validation, and error handling
|
||||
# - For React components: Performance, accessibility, and best practices
|
||||
# - For tests: Coverage, edge cases, and test quality
|
||||
|
||||
# Optional: Different prompts for different authors
|
||||
# direct_prompt: |
|
||||
# ${{ github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' &&
|
||||
# 'Welcome! Please review this PR from a first-time contributor. Be encouraging and provide detailed explanations for any suggestions.' ||
|
||||
# 'Please provide a thorough code review focusing on our coding standards and best practices.' }}
|
||||
# ${{
|
||||
# (github.event.pull_request.additions > 500) &&
|
||||
# 'Large PR detected. Focus only on architectural issues and breaking changes. Skip minor issues.' ||
|
||||
# contains(github.event.pull_request.title, 'fix') &&
|
||||
# 'Bug fix PR: Verify the fix addresses the root cause and check for regression risks.' ||
|
||||
# contains(github.event.pull_request.title, 'deps') &&
|
||||
# 'Dependency update: Check for breaking changes and security advisories only.' ||
|
||||
# contains(github.event.pull_request.title, 'refactor') &&
|
||||
# 'Refactor PR: Verify no behavior changes and check for performance regressions.' ||
|
||||
# contains(github.event.pull_request.title, 'feat') &&
|
||||
# 'New feature: Check for security issues, edge cases, and integration problems only.' ||
|
||||
# 'Standard review: Check for critical bugs, security issues, and breaking changes only.'
|
||||
# }}
|
||||
|
||||
# Optional: Add specific tools for running tests or linting
|
||||
# allowed_tools: "Bash(npm run test),Bash(npm run lint),Bash(npm run typecheck)"
|
||||
|
||||
2
.github/workflows/claude.yml
vendored
2
.github/workflows/claude.yml
vendored
@@ -26,7 +26,7 @@ jobs:
|
||||
actions: read # Required for Claude to read CI results on PRs
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
|
||||
2
.github/workflows/codeql-analysis.yml
vendored
2
.github/workflows/codeql-analysis.yml
vendored
@@ -24,7 +24,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
|
||||
31
.github/workflows/create-docusaurus-pr.yml
vendored
31
.github/workflows/create-docusaurus-pr.yml
vendored
@@ -20,26 +20,49 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout source repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
path: source-repo
|
||||
|
||||
- name: Checkout docs repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
repository: unraid/docs
|
||||
path: docs-repo
|
||||
token: ${{ secrets.DOCS_PAT_UNRAID_BOT }}
|
||||
|
||||
- name: Copy updated docs
|
||||
- name: Copy and process docs
|
||||
run: |
|
||||
if [ ! -d "source-repo/api/docs" ]; then
|
||||
echo "Source directory does not exist!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Remove old API docs but preserve other folders
|
||||
rm -rf docs-repo/docs/API/
|
||||
mkdir -p docs-repo/docs/API
|
||||
|
||||
# Copy all markdown files and maintain directory structure
|
||||
cp -r source-repo/api/docs/public/. docs-repo/docs/API/
|
||||
|
||||
# Copy images to Docusaurus static directory
|
||||
mkdir -p docs-repo/static/img/api
|
||||
|
||||
# Copy images from public/images if they exist
|
||||
if [ -d "source-repo/api/docs/public/images" ]; then
|
||||
cp -r source-repo/api/docs/public/images/. docs-repo/static/img/api/
|
||||
fi
|
||||
|
||||
# Also copy any images from the parent docs/images directory
|
||||
if [ -d "source-repo/api/docs/images" ]; then
|
||||
cp -r source-repo/api/docs/images/. docs-repo/static/img/api/
|
||||
fi
|
||||
|
||||
# Update image paths in markdown files
|
||||
# Replace relative image paths with absolute paths pointing to /img/api/
|
||||
find docs-repo/docs/API -name "*.md" -type f -exec sed -i 's|!\[\([^]]*\)\](\./images/\([^)]*\))||g' {} \;
|
||||
find docs-repo/docs/API -name "*.md" -type f -exec sed -i 's|!\[\([^]]*\)\](images/\([^)]*\))||g' {} \;
|
||||
find docs-repo/docs/API -name "*.md" -type f -exec sed -i 's|!\[\([^]]*\)\](../images/\([^)]*\))||g' {} \;
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
@@ -53,7 +76,7 @@ jobs:
|
||||
Changes were automatically generated from api/docs/* directory.
|
||||
|
||||
@coderabbitai ignore
|
||||
reviewers: ljm42, elibosley, pujitm, mdatelle
|
||||
reviewers: ljm42, elibosley
|
||||
branch: update-api-docs
|
||||
base: main
|
||||
delete-branch: true
|
||||
|
||||
6
.github/workflows/deploy-storybook.yml
vendored
6
.github/workflows/deploy-storybook.yml
vendored
@@ -20,12 +20,12 @@ jobs:
|
||||
name: Deploy Storybook
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '22.17.1'
|
||||
node-version: '22.18.0'
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
@@ -33,7 +33,7 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.1
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
|
||||
version: 1.0
|
||||
|
||||
64
.github/workflows/main.yml
vendored
64
.github/workflows/main.yml
vendored
@@ -19,7 +19,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
# Only run release-please on pushes to main
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||
|
||||
@@ -37,7 +37,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
@@ -45,9 +45,9 @@ jobs:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.1
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system php-cli
|
||||
version: 1.0
|
||||
|
||||
- name: Install pnpm
|
||||
@@ -117,42 +117,68 @@ jobs:
|
||||
# Verify libvirt is running using sudo to bypass group membership delays
|
||||
sudo virsh list --all || true
|
||||
|
||||
- uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: latest
|
||||
- name: Build UI Package First
|
||||
run: |
|
||||
echo "🔧 Building UI package for web tests dependency..."
|
||||
cd ../unraid-ui && pnpm run build
|
||||
|
||||
- name: Run Tests Concurrently
|
||||
run: |
|
||||
set -e
|
||||
|
||||
# Run all tests in parallel with labeled output
|
||||
# Run all tests in parallel with labeled output and coverage generation
|
||||
echo "🚀 Starting API coverage tests..."
|
||||
pnpm run coverage > api-test.log 2>&1 &
|
||||
API_PID=$!
|
||||
|
||||
echo "🚀 Starting Connect plugin tests..."
|
||||
(cd ../packages/unraid-api-plugin-connect && pnpm test) > connect-test.log 2>&1 &
|
||||
(cd ../packages/unraid-api-plugin-connect && pnpm test --coverage 2>/dev/null || pnpm test) > connect-test.log 2>&1 &
|
||||
CONNECT_PID=$!
|
||||
|
||||
echo "🚀 Starting Shared package tests..."
|
||||
(cd ../packages/unraid-shared && pnpm test) > shared-test.log 2>&1 &
|
||||
(cd ../packages/unraid-shared && pnpm test --coverage 2>/dev/null || pnpm test) > shared-test.log 2>&1 &
|
||||
SHARED_PID=$!
|
||||
|
||||
echo "🚀 Starting Web package coverage tests..."
|
||||
(cd ../web && (pnpm test --coverage || pnpm test)) > web-test.log 2>&1 &
|
||||
WEB_PID=$!
|
||||
|
||||
echo "🚀 Starting UI package coverage tests..."
|
||||
(cd ../unraid-ui && pnpm test --coverage 2>/dev/null || pnpm test) > ui-test.log 2>&1 &
|
||||
UI_PID=$!
|
||||
|
||||
echo "🚀 Starting Plugin tests..."
|
||||
(cd ../plugin && pnpm test) > plugin-test.log 2>&1 &
|
||||
PLUGIN_PID=$!
|
||||
|
||||
# Wait for all processes and capture exit codes
|
||||
wait $API_PID && echo "✅ API tests completed" || { echo "❌ API tests failed"; API_EXIT=1; }
|
||||
wait $CONNECT_PID && echo "✅ Connect tests completed" || { echo "❌ Connect tests failed"; CONNECT_EXIT=1; }
|
||||
wait $SHARED_PID && echo "✅ Shared tests completed" || { echo "❌ Shared tests failed"; SHARED_EXIT=1; }
|
||||
wait $WEB_PID && echo "✅ Web tests completed" || { echo "❌ Web tests failed"; WEB_EXIT=1; }
|
||||
wait $UI_PID && echo "✅ UI tests completed" || { echo "❌ UI tests failed"; UI_EXIT=1; }
|
||||
wait $PLUGIN_PID && echo "✅ Plugin tests completed" || { echo "❌ Plugin tests failed"; PLUGIN_EXIT=1; }
|
||||
|
||||
# Display all outputs
|
||||
echo "📋 API Test Results:" && cat api-test.log
|
||||
echo "📋 Connect Plugin Test Results:" && cat connect-test.log
|
||||
echo "📋 Shared Package Test Results:" && cat shared-test.log
|
||||
echo "📋 Web Package Test Results:" && cat web-test.log
|
||||
echo "📋 UI Package Test Results:" && cat ui-test.log
|
||||
echo "📋 Plugin Test Results:" && cat plugin-test.log
|
||||
|
||||
# Exit with error if any test failed
|
||||
if [[ ${API_EXIT:-0} -eq 1 || ${CONNECT_EXIT:-0} -eq 1 || ${SHARED_EXIT:-0} -eq 1 ]]; then
|
||||
if [[ ${API_EXIT:-0} -eq 1 || ${CONNECT_EXIT:-0} -eq 1 || ${SHARED_EXIT:-0} -eq 1 || ${WEB_EXIT:-0} -eq 1 || ${UI_EXIT:-0} -eq 1 || ${PLUGIN_EXIT:-0} -eq 1 ]]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Upload all coverage reports to Codecov
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
files: ./coverage/coverage-final.json,../web/coverage/coverage-final.json,../unraid-ui/coverage/coverage-final.json,../packages/unraid-api-plugin-connect/coverage/coverage-final.json,../packages/unraid-shared/coverage/coverage-final.json
|
||||
fail_ci_if_error: false
|
||||
|
||||
build-api:
|
||||
name: Build API
|
||||
runs-on: ubuntu-latest
|
||||
@@ -163,7 +189,7 @@ jobs:
|
||||
working-directory: api
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
@@ -190,7 +216,7 @@ jobs:
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.1
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential
|
||||
version: 1.0
|
||||
@@ -240,7 +266,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
@@ -267,7 +293,7 @@ jobs:
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.1
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential
|
||||
version: 1.0
|
||||
@@ -298,7 +324,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Create env file
|
||||
run: |
|
||||
@@ -307,7 +333,9 @@ jobs:
|
||||
echo VITE_CONNECT=${{ secrets.VITE_CONNECT }} >> .env
|
||||
echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
|
||||
echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env
|
||||
cat .env
|
||||
|
||||
touch .env.production
|
||||
echo NUXT_UI_PRO_LICENSE=${{ secrets.NUXT_UI_PRO_LICENSE }} >> .env.production
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
@@ -359,7 +387,7 @@ jobs:
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: unraid-wc-rich
|
||||
path: web/.nuxt/nuxt-custom-elements/dist/unraid-components
|
||||
path: web/.nuxt/standalone-apps
|
||||
|
||||
build-plugin-staging-pr:
|
||||
name: Build and Deploy Plugin
|
||||
|
||||
2
.github/workflows/release-production.yml
vendored
2
.github/workflows/release-production.yml
vendored
@@ -30,7 +30,7 @@ jobs:
|
||||
prerelease: false
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '22.17.1'
|
||||
node-version: '22.18.0'
|
||||
- run: |
|
||||
cat << 'EOF' > release-notes.txt
|
||||
${{ steps.release-info.outputs.body }}
|
||||
|
||||
8
.github/workflows/test-libvirt.yml
vendored
8
.github/workflows/test-libvirt.yml
vendored
@@ -22,16 +22,16 @@ jobs:
|
||||
working-directory: ./libvirt
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
submodules: recursive
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.13.5"
|
||||
python-version: "3.13.7"
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.1
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: libvirt-dev
|
||||
version: 1.0
|
||||
@@ -44,7 +44,7 @@ jobs:
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10
|
||||
version: 10.15.0
|
||||
run_install: false
|
||||
|
||||
- name: Get pnpm store directory
|
||||
|
||||
9
.gitignore
vendored
9
.gitignore
vendored
@@ -76,6 +76,9 @@ typescript
|
||||
# Github actions
|
||||
RELEASE_NOTES.md
|
||||
|
||||
# Test backups
|
||||
api/dev/configs/api.json.backup
|
||||
|
||||
# Docker Deploy Folder
|
||||
deploy/*
|
||||
!deploy/.gitkeep
|
||||
@@ -112,3 +115,9 @@ api/dev/Unraid.net/myservers.cfg
|
||||
|
||||
# Claude local settings
|
||||
.claude/settings.local.json
|
||||
|
||||
# local Mise settings
|
||||
.mise.toml
|
||||
|
||||
# environment variables
|
||||
web/.env.production
|
||||
|
||||
@@ -1 +1 @@
|
||||
{".":"4.11.0"}
|
||||
{".":"4.18.2"}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
@custom-variant dark (&:where(.dark, .dark *));
|
||||
|
||||
@layer utilities {
|
||||
:host {
|
||||
/* Utility defaults for web components (when we were using shadow DOM) */
|
||||
:host {
|
||||
--tw-divide-y-reverse: 0;
|
||||
--tw-border-style: solid;
|
||||
--tw-font-weight: initial;
|
||||
@@ -48,21 +48,20 @@
|
||||
--tw-drop-shadow: initial;
|
||||
--tw-duration: initial;
|
||||
--tw-ease: initial;
|
||||
}
|
||||
}
|
||||
|
||||
@layer base {
|
||||
*,
|
||||
::after,
|
||||
::before,
|
||||
::backdrop,
|
||||
::file-selector-button {
|
||||
border-color: hsl(var(--border));
|
||||
}
|
||||
/* Global border color - this is what's causing the issue! */
|
||||
/* Commenting out since it affects all elements globally
|
||||
*,
|
||||
::after,
|
||||
::before,
|
||||
::backdrop,
|
||||
::file-selector-button {
|
||||
border-color: hsl(var(--border));
|
||||
}
|
||||
*/
|
||||
|
||||
|
||||
|
||||
body {
|
||||
body {
|
||||
--color-alpha: #1c1b1b;
|
||||
--color-beta: #f2f2f2;
|
||||
--color-gamma: #999999;
|
||||
@@ -74,8 +73,7 @@
|
||||
--ring-shadow: 0 0 var(--color-beta);
|
||||
}
|
||||
|
||||
button:not(:disabled),
|
||||
[role='button']:not(:disabled) {
|
||||
cursor: pointer;
|
||||
}
|
||||
button:not(:disabled),
|
||||
[role='button']:not(:disabled) {
|
||||
cursor: pointer;
|
||||
}
|
||||
@@ -1,7 +1,11 @@
|
||||
/* Hybrid theme system: Native CSS + Theme Store fallback */
|
||||
@layer base {
|
||||
/* Light mode defaults */
|
||||
:root {
|
||||
|
||||
/* Light mode defaults */
|
||||
:root {
|
||||
/* Override Tailwind v4 global styles to use webgui variables */
|
||||
--ui-bg: var(--background-color) !important;
|
||||
--ui-text: var(--text-color) !important;
|
||||
|
||||
--background: 0 0% 100%;
|
||||
--foreground: 0 0% 3.9%;
|
||||
--muted: 0 0% 96.1%;
|
||||
@@ -30,6 +34,10 @@
|
||||
|
||||
/* Dark mode */
|
||||
.dark {
|
||||
/* Override Tailwind v4 global styles to use webgui variables */
|
||||
--ui-bg: var(--background-color) !important;
|
||||
--ui-text: var(--text-color) !important;
|
||||
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--muted: 0 0% 14.9%;
|
||||
@@ -62,69 +70,4 @@
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
}
|
||||
|
||||
/* For web components: inherit CSS variables from the host */
|
||||
:host {
|
||||
--background: inherit;
|
||||
--foreground: inherit;
|
||||
--muted: inherit;
|
||||
--muted-foreground: inherit;
|
||||
--popover: inherit;
|
||||
--popover-foreground: inherit;
|
||||
--card: inherit;
|
||||
--card-foreground: inherit;
|
||||
--border: inherit;
|
||||
--input: inherit;
|
||||
--primary: inherit;
|
||||
--primary-foreground: inherit;
|
||||
--secondary: inherit;
|
||||
--secondary-foreground: inherit;
|
||||
--accent: inherit;
|
||||
--accent-foreground: inherit;
|
||||
--destructive: inherit;
|
||||
--destructive-foreground: inherit;
|
||||
--ring: inherit;
|
||||
--chart-1: inherit;
|
||||
--chart-2: inherit;
|
||||
--chart-3: inherit;
|
||||
--chart-4: inherit;
|
||||
--chart-5: inherit;
|
||||
}
|
||||
|
||||
/* Class-based dark mode support for web components using :host-context */
|
||||
:host-context(.dark) {
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--muted: 0 0% 14.9%;
|
||||
--muted-foreground: 0 0% 63.9%;
|
||||
--popover: 0 0% 3.9%;
|
||||
--popover-foreground: 0 0% 98%;
|
||||
--card: 0 0% 3.9%;
|
||||
--card-foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
--input: 0 0% 14.9%;
|
||||
--primary: 0 0% 98%;
|
||||
--primary-foreground: 0 0% 9%;
|
||||
--secondary: 0 0% 14.9%;
|
||||
--secondary-foreground: 0 0% 98%;
|
||||
--accent: 0 0% 14.9%;
|
||||
--accent-foreground: 0 0% 98%;
|
||||
--destructive: 0 62.8% 30.6%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--ring: 0 0% 83.1%;
|
||||
--chart-1: 220 70% 50%;
|
||||
--chart-2: 160 60% 45%;
|
||||
--chart-3: 30 80% 55%;
|
||||
--chart-4: 280 65% 60%;
|
||||
--chart-5: 340 75% 55%;
|
||||
}
|
||||
|
||||
/* Alternative class-based dark mode support for specific Unraid themes */
|
||||
:host-context(.dark[data-theme='black']),
|
||||
:host-context(.dark[data-theme='gray']) {
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2,4 +2,4 @@
|
||||
@import './css-variables.css';
|
||||
@import './unraid-theme.css';
|
||||
@import './base-utilities.css';
|
||||
@import './sonner.css';
|
||||
@import './sonner.css';
|
||||
|
||||
@@ -233,8 +233,8 @@
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
padding: 0;
|
||||
color: var(--gray12);
|
||||
border: 1px solid var(--gray4);
|
||||
color: hsl(var(--foreground));
|
||||
border: 1px solid hsl(var(--border));
|
||||
transform: var(--toast-close-button-transform);
|
||||
border-radius: 50%;
|
||||
cursor: pointer;
|
||||
@@ -243,7 +243,7 @@
|
||||
}
|
||||
|
||||
[data-sonner-toast] [data-close-button] {
|
||||
background: var(--gray1);
|
||||
background: hsl(var(--background));
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-close-button]):focus-visible {
|
||||
@@ -255,8 +255,8 @@
|
||||
}
|
||||
|
||||
[data-sonner-toast]:hover [data-close-button]:hover {
|
||||
background: var(--gray2);
|
||||
border-color: var(--gray5);
|
||||
background: hsl(var(--muted));
|
||||
border-color: hsl(var(--border));
|
||||
}
|
||||
|
||||
/* Leave a ghost div to avoid setting hover to false when swiping out */
|
||||
@@ -414,10 +414,27 @@
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-theme='light'] {
|
||||
--normal-bg: #fff;
|
||||
--normal-border: var(--gray4);
|
||||
--normal-text: var(--gray12);
|
||||
--normal-bg: hsl(var(--background));
|
||||
--normal-border: hsl(var(--border));
|
||||
--normal-text: hsl(var(--foreground));
|
||||
|
||||
--success-bg: hsl(var(--background));
|
||||
--success-border: hsl(var(--border));
|
||||
--success-text: hsl(140, 100%, 27%);
|
||||
|
||||
--info-bg: hsl(var(--background));
|
||||
--info-border: hsl(var(--border));
|
||||
--info-text: hsl(210, 92%, 45%);
|
||||
|
||||
--warning-bg: hsl(var(--background));
|
||||
--warning-border: hsl(var(--border));
|
||||
--warning-text: hsl(31, 92%, 45%);
|
||||
|
||||
--error-bg: hsl(var(--background));
|
||||
--error-border: hsl(var(--border));
|
||||
--error-text: hsl(360, 100%, 45%);
|
||||
|
||||
/* Old colors, preserved for reference
|
||||
--success-bg: hsl(143, 85%, 96%);
|
||||
--success-border: hsl(145, 92%, 91%);
|
||||
--success-text: hsl(140, 100%, 27%);
|
||||
@@ -432,26 +449,43 @@
|
||||
|
||||
--error-bg: hsl(359, 100%, 97%);
|
||||
--error-border: hsl(359, 100%, 94%);
|
||||
--error-text: hsl(360, 100%, 45%);
|
||||
--error-text: hsl(360, 100%, 45%); */
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-theme='light'] [data-sonner-toast][data-invert='true'] {
|
||||
--normal-bg: #000;
|
||||
--normal-border: hsl(0, 0%, 20%);
|
||||
--normal-text: var(--gray1);
|
||||
--normal-bg: hsl(0 0% 3.9%);
|
||||
--normal-border: hsl(0 0% 14.9%);
|
||||
--normal-text: hsl(0 0% 98%);
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-theme='dark'] [data-sonner-toast][data-invert='true'] {
|
||||
--normal-bg: #fff;
|
||||
--normal-border: var(--gray3);
|
||||
--normal-text: var(--gray12);
|
||||
--normal-bg: hsl(0 0% 100%);
|
||||
--normal-border: hsl(0 0% 89.8%);
|
||||
--normal-text: hsl(0 0% 3.9%);
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-theme='dark'] {
|
||||
--normal-bg: #000;
|
||||
--normal-border: hsl(0, 0%, 20%);
|
||||
--normal-text: var(--gray1);
|
||||
--normal-bg: hsl(var(--background));
|
||||
--normal-border: hsl(var(--border));
|
||||
--normal-text: hsl(var(--foreground));
|
||||
|
||||
--success-bg: hsl(var(--background));
|
||||
--success-border: hsl(var(--border));
|
||||
--success-text: hsl(150, 86%, 65%);
|
||||
|
||||
--info-bg: hsl(var(--background));
|
||||
--info-border: hsl(var(--border));
|
||||
--info-text: hsl(216, 87%, 65%);
|
||||
|
||||
--warning-bg: hsl(var(--background));
|
||||
--warning-border: hsl(var(--border));
|
||||
--warning-text: hsl(46, 87%, 65%);
|
||||
|
||||
--error-bg: hsl(var(--background));
|
||||
--error-border: hsl(var(--border));
|
||||
--error-text: hsl(358, 100%, 81%);
|
||||
|
||||
/* Old colors, preserved for reference
|
||||
--success-bg: hsl(150, 100%, 6%);
|
||||
--success-border: hsl(147, 100%, 12%);
|
||||
--success-text: hsl(150, 86%, 65%);
|
||||
@@ -466,7 +500,7 @@
|
||||
|
||||
--error-bg: hsl(358, 76%, 10%);
|
||||
--error-border: hsl(357, 89%, 16%);
|
||||
--error-text: hsl(358, 100%, 81%);
|
||||
--error-text: hsl(358, 100%, 81%); */
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='success'] {
|
||||
@@ -541,7 +575,7 @@
|
||||
|
||||
.sonner-loading-bar {
|
||||
animation: sonner-spin 1.2s linear infinite;
|
||||
background: var(--gray11);
|
||||
background: hsl(var(--muted-foreground));
|
||||
border-radius: 6px;
|
||||
height: 8%;
|
||||
left: -10%;
|
||||
|
||||
14
CLAUDE.md
14
CLAUDE.md
@@ -120,6 +120,13 @@ Enables GraphQL playground at `http://tower.local/graphql`
|
||||
|
||||
### Testing Guidelines
|
||||
|
||||
#### General Testing Best Practices
|
||||
|
||||
- **Error Testing:** Use `.rejects.toThrow()` without arguments to test that functions throw errors. Don't test exact error message strings unless the message format is specifically what you're testing
|
||||
- **Focus on Behavior:** Test what the code does, not implementation details like exact error message wording
|
||||
- **Avoid Brittleness:** Don't write tests that break when minor changes are made to error messages, log formats, or other non-essential details
|
||||
- **Use Mocks Correctly**: Mocks should be used as nouns, not verbs.
|
||||
|
||||
#### Vue Component Testing
|
||||
|
||||
- This is a Nuxt.js app but we are testing with vitest outside of the Nuxt environment
|
||||
@@ -149,4 +156,9 @@ Enables GraphQL playground at `http://tower.local/graphql`
|
||||
## Development Memories
|
||||
|
||||
- We are using tailwind v4 we do not need a tailwind config anymore
|
||||
- always search the internet for tailwind v4 documentation when making tailwind related style changes
|
||||
- always search the internet for tailwind v4 documentation when making tailwind related style changes
|
||||
- never run or restart the API server or web server. I will handle the lifecycle, simply wait and ask me to do this for you
|
||||
- Never use the `any` type. Always prefer proper typing
|
||||
- Avoid using casting whenever possible, prefer proper typing from the start
|
||||
- **IMPORTANT:** cache-manager v7 expects TTL values in **milliseconds**, not seconds. Always use milliseconds when setting cache TTL (e.g., 600000 for 10 minutes, not 600)
|
||||
- Use the nuxt UI library in VUE MODE NOT IN NUXT MODE
|
||||
|
||||
10
api/.depcheckrc
Normal file
10
api/.depcheckrc
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"parsers": {
|
||||
"**/*.ts": [
|
||||
"@depcheck/parser-typescript",
|
||||
{
|
||||
"project": "tsconfig.json"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -15,7 +15,10 @@ PATHS_ACTIVATION_BASE=./dev/activation
|
||||
PATHS_PASSWD=./dev/passwd
|
||||
PATHS_RCLONE_SOCKET=./dev/rclone-socket
|
||||
PATHS_LOG_BASE=./dev/log # Where we store logs
|
||||
PATHS_LOGS_FILE=./dev/log/graphql-api.log
|
||||
PATHS_CONNECT_STATUS_FILE_PATH=./dev/connectStatus.json # Connect plugin status file
|
||||
PATHS_OIDC_JSON=./dev/configs/oidc.local.json
|
||||
PATHS_LOCAL_SESSION_FILE=./dev/local-session
|
||||
ENVIRONMENT="development"
|
||||
NODE_ENV="development"
|
||||
PORT="3001"
|
||||
|
||||
@@ -13,5 +13,7 @@ PATHS_PARITY_CHECKS=./dev/states/parity-checks.log
|
||||
PATHS_CONFIG_MODULES=./dev/configs
|
||||
PATHS_ACTIVATION_BASE=./dev/activation
|
||||
PATHS_PASSWD=./dev/passwd
|
||||
PATHS_LOGS_FILE=./dev/log/graphql-api.log
|
||||
PATHS_LOCAL_SESSION_FILE=./dev/local-session
|
||||
PORT=5000
|
||||
NODE_ENV="test"
|
||||
|
||||
11
api/.gitignore
vendored
11
api/.gitignore
vendored
@@ -82,3 +82,14 @@ deploy/*
|
||||
.idea
|
||||
|
||||
!**/*.login.*
|
||||
|
||||
# local api configs - don't need project-wide tracking
|
||||
dev/connectStatus.json
|
||||
dev/configs/*
|
||||
# local status - doesn't need to be tracked
|
||||
dev/connectStatus.json
|
||||
# mock local session file
|
||||
dev/local-session
|
||||
|
||||
# local OIDC config for testing - contains secrets
|
||||
dev/configs/oidc.local.json
|
||||
|
||||
138
api/CHANGELOG.md
138
api/CHANGELOG.md
@@ -1,5 +1,143 @@
|
||||
# Changelog
|
||||
|
||||
## [4.18.2](https://github.com/unraid/api/compare/v4.18.1...v4.18.2) (2025-09-03)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add missing CPU guest metrics to CPU responses ([#1644](https://github.com/unraid/api/issues/1644)) ([99dbad5](https://github.com/unraid/api/commit/99dbad57d55a256f5f3f850f9a47a6eaa6348065))
|
||||
* **plugin:** raise minimum unraid os version to 6.12.15 ([#1649](https://github.com/unraid/api/issues/1649)) ([bc15bd3](https://github.com/unraid/api/commit/bc15bd3d7008acb416ac3c6fb1f4724c685ec7e7))
|
||||
* update GitHub Actions token for workflow trigger ([4d8588b](https://github.com/unraid/api/commit/4d8588b17331afa45ba8caf84fcec8c0ea03591f))
|
||||
* update OIDC URL validation and add tests ([#1646](https://github.com/unraid/api/issues/1646)) ([c7c3bb5](https://github.com/unraid/api/commit/c7c3bb57ea482633a7acff064b39fbc8d4e07213))
|
||||
* use shared bg & border color for styled toasts ([#1647](https://github.com/unraid/api/issues/1647)) ([7c3aee8](https://github.com/unraid/api/commit/7c3aee8f3f9ba82ae8c8ed3840c20ab47f3cb00f))
|
||||
|
||||
## [4.18.1](https://github.com/unraid/api/compare/v4.18.0...v4.18.1) (2025-09-03)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* OIDC and API Key management issues ([#1642](https://github.com/unraid/api/issues/1642)) ([0fe2c2c](https://github.com/unraid/api/commit/0fe2c2c1c85dcc547e4b1217a3b5636d7dd6d4b4))
|
||||
* rm redundant emission to `$HOME/.pm2/logs` ([#1640](https://github.com/unraid/api/issues/1640)) ([a8e4119](https://github.com/unraid/api/commit/a8e4119270868a1dabccd405853a7340f8dcd8a5))
|
||||
|
||||
## [4.18.0](https://github.com/unraid/api/compare/v4.17.0...v4.18.0) (2025-09-02)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **api:** enhance OIDC redirect URI handling in service and tests ([#1618](https://github.com/unraid/api/issues/1618)) ([4e945f5](https://github.com/unraid/api/commit/4e945f5f56ce059eb275a9576caf3194a5df8a90))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* api key creation cli ([#1637](https://github.com/unraid/api/issues/1637)) ([c147a6b](https://github.com/unraid/api/commit/c147a6b5075969e77798210c4a5cfd1fa5b96ae3))
|
||||
* **cli:** support `--log-level` for `start` and `restart` cmds ([#1623](https://github.com/unraid/api/issues/1623)) ([a1ee915](https://github.com/unraid/api/commit/a1ee915ca52e5a063eccf8facbada911a63f37f6))
|
||||
* confusing server -> status query ([#1635](https://github.com/unraid/api/issues/1635)) ([9d42b36](https://github.com/unraid/api/commit/9d42b36f74274cad72490da5152fdb98fdc5b89b))
|
||||
* use unraid css variables in sonner ([#1634](https://github.com/unraid/api/issues/1634)) ([26a95af](https://github.com/unraid/api/commit/26a95af9539d05a837112d62dc6b7dd46761c83f))
|
||||
|
||||
## [4.17.0](https://github.com/unraid/api/compare/v4.16.0...v4.17.0) (2025-08-27)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add tailwind class sort plugin ([#1562](https://github.com/unraid/api/issues/1562)) ([ab11e7f](https://github.com/unraid/api/commit/ab11e7ff7ff74da1f1cd5e49938459d00bfc846b))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* cleanup obsoleted legacy api keys on api startup (cli / connect) ([#1630](https://github.com/unraid/api/issues/1630)) ([6469d00](https://github.com/unraid/api/commit/6469d002b7b18e49c77ee650a4255974ab43e790))
|
||||
|
||||
## [4.16.0](https://github.com/unraid/api/compare/v4.15.1...v4.16.0) (2025-08-27)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add `parityCheckStatus` field to `array` query ([#1611](https://github.com/unraid/api/issues/1611)) ([c508366](https://github.com/unraid/api/commit/c508366702b9fa20d9ed05559fe73da282116aa6))
|
||||
* generated UI API key management + OAuth-like API Key Flows ([#1609](https://github.com/unraid/api/issues/1609)) ([674323f](https://github.com/unraid/api/commit/674323fd87bbcc55932e6b28f6433a2de79b7ab0))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **connect:** clear `wanport` upon disabling remote access ([#1624](https://github.com/unraid/api/issues/1624)) ([9df6a3f](https://github.com/unraid/api/commit/9df6a3f5ebb0319aa7e3fe3be6159d39ec6f587f))
|
||||
* **connect:** valid LAN FQDN while remote access is enabled ([#1625](https://github.com/unraid/api/issues/1625)) ([aa58888](https://github.com/unraid/api/commit/aa588883cc2e2fe4aa4aea1d035236c888638f5b))
|
||||
* correctly parse periods in share names from ini file ([#1629](https://github.com/unraid/api/issues/1629)) ([7d67a40](https://github.com/unraid/api/commit/7d67a404333a38d6e1ba5c3febf02be8b1b71901))
|
||||
* **rc.unraid-api:** remove profile sourcing ([#1622](https://github.com/unraid/api/issues/1622)) ([6947b5d](https://github.com/unraid/api/commit/6947b5d4aff70319116eb65cf4c639444f3749e9))
|
||||
* remove unused api key calls ([#1628](https://github.com/unraid/api/issues/1628)) ([9cd0d6a](https://github.com/unraid/api/commit/9cd0d6ac658475efa25683ef6e3f2e1d68f7e903))
|
||||
* retry VMs init for up to 2 min ([#1612](https://github.com/unraid/api/issues/1612)) ([b2e7801](https://github.com/unraid/api/commit/b2e78012384e6b3f2630341281fc811026be23b9))
|
||||
|
||||
## [4.15.1](https://github.com/unraid/api/compare/v4.15.0...v4.15.1) (2025-08-20)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* minor duplicate click handler and version resolver nullability issue ([ac198d5](https://github.com/unraid/api/commit/ac198d5d1a3073fdeb053c2ff8f704b0dba0d047))
|
||||
|
||||
## [4.15.0](https://github.com/unraid/api/compare/v4.14.0...v4.15.0) (2025-08-20)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **api:** restructure versioning information in GraphQL schema ([#1600](https://github.com/unraid/api/issues/1600)) ([d0c6602](https://github.com/unraid/api/commit/d0c66020e1d1d5b6fcbc4ee8979bba4b3d34c7ad))
|
||||
|
||||
## [4.14.0](https://github.com/unraid/api/compare/v4.13.1...v4.14.0) (2025-08-19)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **api:** add cpu utilization query and subscription ([#1590](https://github.com/unraid/api/issues/1590)) ([2b4c2a2](https://github.com/unraid/api/commit/2b4c2a264bb2769f88c3000d16447889cae57e98))
|
||||
* enhance OIDC claim evaluation with array handling ([#1596](https://github.com/unraid/api/issues/1596)) ([b7798b8](https://github.com/unraid/api/commit/b7798b82f44aae9a428261270fd9dbde35ff7751))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* remove unraid-api sso users & always apply sso modification on < 7.2 ([#1595](https://github.com/unraid/api/issues/1595)) ([4262830](https://github.com/unraid/api/commit/426283011afd41e3af7e48cfbb2a2d351c014bd1))
|
||||
* update Docusaurus PR workflow to process and copy API docs ([3a10871](https://github.com/unraid/api/commit/3a10871918fe392a1974b69d16a135546166e058))
|
||||
* update OIDC provider setup documentation for navigation clarity ([1a01696](https://github.com/unraid/api/commit/1a01696dc7b947abf5f2f097de1b231d5593c2ff))
|
||||
* update OIDC provider setup documentation for redirect URI and screenshots ([1bc5251](https://github.com/unraid/api/commit/1bc52513109436b3ce8237c3796af765e208f9fc))
|
||||
|
||||
## [4.13.1](https://github.com/unraid/api/compare/v4.13.0...v4.13.1) (2025-08-15)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* insecure routes not working for SSO ([#1587](https://github.com/unraid/api/issues/1587)) ([a4ff3c4](https://github.com/unraid/api/commit/a4ff3c40926915f6989ed4af679b30cf295ea15d))
|
||||
|
||||
## [4.13.0](https://github.com/unraid/api/compare/v4.12.0...v4.13.0) (2025-08-15)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* `createDockerFolder` & `setDockerFolderChildren` mutations ([#1558](https://github.com/unraid/api/issues/1558)) ([557b03f](https://github.com/unraid/api/commit/557b03f8829d3f179b5e26162fa250121cb33420))
|
||||
* `deleteDockerEntries` mutation ([#1564](https://github.com/unraid/api/issues/1564)) ([78997a0](https://github.com/unraid/api/commit/78997a02c6d96ec0ed75352dfc9849524147428c))
|
||||
* add `moveDockerEntriesToFolder` mutation ([#1569](https://github.com/unraid/api/issues/1569)) ([20c2d5b](https://github.com/unraid/api/commit/20c2d5b4457ad50d1e287fb3141aa98e8e7de665))
|
||||
* add docker -> organizer query ([#1555](https://github.com/unraid/api/issues/1555)) ([dfe352d](https://github.com/unraid/api/commit/dfe352dfa1bd6aa059cab56357ba6bff5e8ed7cb))
|
||||
* connect settings page updated for responsive webgui ([#1585](https://github.com/unraid/api/issues/1585)) ([96c120f](https://github.com/unraid/api/commit/96c120f9b24d3c91df5e9401917c8994eef36c46))
|
||||
* implement OIDC provider management in GraphQL API ([#1563](https://github.com/unraid/api/issues/1563)) ([979a267](https://github.com/unraid/api/commit/979a267bc5e128a8b789f0123e23c61860ebb11b))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* change config file loading error log to debug ([#1565](https://github.com/unraid/api/issues/1565)) ([3534d6f](https://github.com/unraid/api/commit/3534d6fdd7c59e65615167cfe306deebad9ca4d3))
|
||||
* **connect:** remove unraid-api folder before creating symlink ([#1556](https://github.com/unraid/api/issues/1556)) ([514a0ef](https://github.com/unraid/api/commit/514a0ef560a90595f774b6c0db60f1d2b4cd853c))
|
||||
* **deps:** pin dependencies ([#1586](https://github.com/unraid/api/issues/1586)) ([5721785](https://github.com/unraid/api/commit/57217852a337ead4c8c8e7596d1b7d590b64a26f))
|
||||
* **deps:** update all non-major dependencies ([#1543](https://github.com/unraid/api/issues/1543)) ([18b5209](https://github.com/unraid/api/commit/18b52090874c0ba86878d0f7e31bf0dc42734d75))
|
||||
* **deps:** update all non-major dependencies ([#1579](https://github.com/unraid/api/issues/1579)) ([ad6aa3b](https://github.com/unraid/api/commit/ad6aa3b6743aeeb42eff34d1c89ad874dfd0af09))
|
||||
* refactor API client to support Unix socket connections ([#1575](https://github.com/unraid/api/issues/1575)) ([a2c5d24](https://github.com/unraid/api/commit/a2c5d2495ffc02efa1ec5c63f0a1c5d23c9ed7ff))
|
||||
* **theme:** API key white text on white background ([#1584](https://github.com/unraid/api/issues/1584)) ([b321687](https://github.com/unraid/api/commit/b3216874faae208cdfc3edec719629fce428b6a3))
|
||||
|
||||
## [4.12.0](https://github.com/unraid/api/compare/v4.11.0...v4.12.0) (2025-07-30)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add ups monitoring to graphql api ([#1526](https://github.com/unraid/api/issues/1526)) ([6ea94f0](https://github.com/unraid/api/commit/6ea94f061d5b2e6c6fbfa6949006960501e3f4e7))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* enhance plugin management with interactive removal prompts ([#1549](https://github.com/unraid/api/issues/1549)) ([23ef760](https://github.com/unraid/api/commit/23ef760d763c525a38108048200fa73fc8531aed))
|
||||
* remove connect api plugin upon removal of Connect Unraid plugin ([#1548](https://github.com/unraid/api/issues/1548)) ([782d5eb](https://github.com/unraid/api/commit/782d5ebadc67854298f3b2355255983024d2a225))
|
||||
* SSO not being detected ([#1546](https://github.com/unraid/api/issues/1546)) ([6b3b951](https://github.com/unraid/api/commit/6b3b951d8288cd31d096252be544537dc2bfce50))
|
||||
|
||||
## [4.11.0](https://github.com/unraid/api/compare/v4.10.0...v4.11.0) (2025-07-28)
|
||||
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
###########################################################
|
||||
# Development/Build Image
|
||||
###########################################################
|
||||
FROM node:22.17.1-bookworm-slim AS development
|
||||
FROM node:22.18.0-bookworm-slim AS development
|
||||
|
||||
# Install build tools and dependencies
|
||||
RUN apt-get update -y && apt-get install -y \
|
||||
|
||||
34
api/dev/configs/README.md
Normal file
34
api/dev/configs/README.md
Normal file
@@ -0,0 +1,34 @@
|
||||
# Development Configuration Files
|
||||
|
||||
This directory contains configuration files for local development.
|
||||
|
||||
## OIDC Configuration
|
||||
|
||||
### oidc.json
|
||||
The default OIDC configuration file. This file is committed to git and should only contain non-sensitive test configurations.
|
||||
|
||||
### Using a Local Configuration (gitignored)
|
||||
For local testing with real OAuth providers:
|
||||
|
||||
1. Create an `oidc.local.json` file based on `oidc.json`
|
||||
2. Set the environment variable: `PATHS_OIDC_JSON=./dev/configs/oidc.local.json`
|
||||
3. The API will load your local configuration instead of the default
|
||||
|
||||
Example:
|
||||
```bash
|
||||
PATHS_OIDC_JSON=./dev/configs/oidc.local.json pnpm dev
|
||||
```
|
||||
|
||||
### Setting up OAuth Apps
|
||||
|
||||
#### Google
|
||||
1. Go to [Google Cloud Console](https://console.cloud.google.com/)
|
||||
2. Create a new project or select existing
|
||||
3. Enable Google+ API
|
||||
4. Create OAuth 2.0 credentials
|
||||
5. Add authorized redirect URI: `http://localhost:3000/graphql/api/auth/oidc/callback`
|
||||
|
||||
#### GitHub
|
||||
1. Go to GitHub Settings > Developer settings > OAuth Apps
|
||||
2. Create a new OAuth App
|
||||
3. Set Authorization callback URL: `http://localhost:3000/graphql/api/auth/oidc/callback`
|
||||
@@ -1,9 +1,6 @@
|
||||
{
|
||||
"version": "4.10.0",
|
||||
"extraOrigins": [
|
||||
"https://google.com",
|
||||
"https://test.com"
|
||||
],
|
||||
"version": "4.18.1",
|
||||
"extraOrigins": [],
|
||||
"sandbox": true,
|
||||
"ssoSubIds": [],
|
||||
"plugins": [
|
||||
|
||||
@@ -2,11 +2,11 @@
|
||||
"wanaccess": true,
|
||||
"wanport": 8443,
|
||||
"upnpEnabled": false,
|
||||
"apikey": "_______________________BIG_API_KEY_HERE_________________________",
|
||||
"apikey": "",
|
||||
"localApiKey": "_______________________LOCAL_API_KEY_HERE_________________________",
|
||||
"email": "test@example.com",
|
||||
"username": "zspearmint",
|
||||
"avatar": "https://via.placeholder.com/200",
|
||||
"regWizTime": "1611175408732_0951-1653-3509-FBA155FA23C0",
|
||||
"dynamicRemoteAccessType": "DISABLED"
|
||||
"dynamicRemoteAccessType": "STATIC"
|
||||
}
|
||||
22
api/dev/configs/oidc.json
Normal file
22
api/dev/configs/oidc.json
Normal file
@@ -0,0 +1,22 @@
|
||||
{
|
||||
"providers": [
|
||||
{
|
||||
"id": "unraid.net",
|
||||
"name": "Unraid.net",
|
||||
"clientId": "CONNECT_SERVER_SSO",
|
||||
"issuer": "https://account.unraid.net",
|
||||
"authorizationEndpoint": "https://account.unraid.net/sso/",
|
||||
"tokenEndpoint": "https://account.unraid.net/api/oauth2/token",
|
||||
"scopes": [
|
||||
"openid",
|
||||
"profile",
|
||||
"email"
|
||||
],
|
||||
"authorizedSubIds": [
|
||||
"297294e2-b31c-4bcc-a441-88aee0ad609f"
|
||||
],
|
||||
"buttonText": "Login With Unraid.net"
|
||||
}
|
||||
],
|
||||
"defaultAllowedOrigins": []
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"createdAt": "2025-01-27T16:22:56.501Z",
|
||||
"description": "API key for Connect user",
|
||||
"id": "b5b4aa3d-8e40-4c92-bc40-d50182071886",
|
||||
"key": "_______________________LOCAL_API_KEY_HERE_________________________",
|
||||
"name": "Connect",
|
||||
"permissions": [],
|
||||
"roles": [
|
||||
"CONNECT"
|
||||
]
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"createdAt": "2025-07-23T17:34:06.301Z",
|
||||
"description": "Internal admin API key used by CLI commands for system operations",
|
||||
"id": "fc91da7b-0284-46f4-9018-55aa9759fba9",
|
||||
"key": "_______SUPER_SECRET_KEY_______",
|
||||
"name": "CliInternal",
|
||||
"permissions": [],
|
||||
"roles": [
|
||||
"ADMIN"
|
||||
]
|
||||
}
|
||||
1
api/dev/log/.gitkeep
Normal file
1
api/dev/log/.gitkeep
Normal file
@@ -0,0 +1 @@
|
||||
# custom log directory for tests & development
|
||||
@@ -65,4 +65,38 @@ color="yellow-on"
|
||||
size="0"
|
||||
free="9091184"
|
||||
used="32831348"
|
||||
luksStatus="0"
|
||||
["system.with.periods"]
|
||||
name="system.with.periods"
|
||||
nameOrig="system.with.periods"
|
||||
comment="system data with periods"
|
||||
allocator="highwater"
|
||||
splitLevel="1"
|
||||
floor="0"
|
||||
include=""
|
||||
exclude=""
|
||||
useCache="prefer"
|
||||
cachePool="cache"
|
||||
cow="auto"
|
||||
color="yellow-on"
|
||||
size="0"
|
||||
free="9091184"
|
||||
used="32831348"
|
||||
luksStatus="0"
|
||||
["system.with.🚀"]
|
||||
name="system.with.🚀"
|
||||
nameOrig="system.with.🚀"
|
||||
comment="system data with 🚀"
|
||||
allocator="highwater"
|
||||
splitLevel="1"
|
||||
floor="0"
|
||||
include=""
|
||||
exclude=""
|
||||
useCache="prefer"
|
||||
cachePool="cache"
|
||||
cow="auto"
|
||||
color="yellow-on"
|
||||
size="0"
|
||||
free="9091184"
|
||||
used="32831348"
|
||||
luksStatus="0"
|
||||
100
api/docs/public/api-key-app-developer-authorization-flow.md
Normal file
100
api/docs/public/api-key-app-developer-authorization-flow.md
Normal file
@@ -0,0 +1,100 @@
|
||||
# API Key Authorization Flow
|
||||
|
||||
This document describes the self-service API key creation flow for third-party applications.
|
||||
|
||||
## Overview
|
||||
|
||||
Applications can request API access to an Unraid server by redirecting users to a special authorization page where users can review requested permissions and create an API key with one click.
|
||||
|
||||
## Flow
|
||||
|
||||
1. **Application initiates request**: The app redirects the user to:
|
||||
|
||||
```
|
||||
https://[unraid-server]/ApiKeyAuthorize?name=MyApp&scopes=docker:read,vm:*&redirect_uri=https://myapp.com/callback&state=abc123
|
||||
```
|
||||
|
||||
2. **User authentication**: If not already logged in, the user is redirected to login first (standard Unraid auth)
|
||||
|
||||
3. **Consent screen**: User sees:
|
||||
- Application name and description
|
||||
- Requested permissions (with checkboxes to approve/deny specific scopes)
|
||||
- API key name field (pre-filled)
|
||||
- Authorize & Cancel buttons
|
||||
|
||||
4. **API key creation**: Upon authorization:
|
||||
- API key is created with approved scopes
|
||||
- Key is displayed to the user
|
||||
- If `redirect_uri` is provided, user is redirected back with the key
|
||||
|
||||
5. **Callback**: App receives the API key:
|
||||
```
|
||||
https://myapp.com/callback?api_key=xxx&state=abc123
|
||||
```
|
||||
|
||||
## Query Parameters
|
||||
|
||||
- `name` (required): Name of the requesting application
|
||||
- `description` (optional): Description of the application
|
||||
- `scopes` (required): Comma-separated list of requested scopes
|
||||
- `redirect_uri` (optional): URL to redirect after authorization
|
||||
- `state` (optional): Opaque value for maintaining state
|
||||
|
||||
## Scope Format
|
||||
|
||||
Scopes follow the pattern: `resource:action`
|
||||
|
||||
### Examples:
|
||||
|
||||
- `docker:read` - Read access to Docker
|
||||
- `vm:*` - Full access to VMs
|
||||
- `system:update` - Update access to system
|
||||
- `role:viewer` - Viewer role access
|
||||
- `role:admin` - Admin role access
|
||||
|
||||
### Available Resources:
|
||||
|
||||
- `docker`, `vm`, `system`, `share`, `user`, `network`, `disk`, etc.
|
||||
|
||||
### Available Actions:
|
||||
|
||||
- `create`, `read`, `update`, `delete` or `*` for all
|
||||
|
||||
## Security Considerations
|
||||
|
||||
1. **HTTPS required**: Redirect URIs must use HTTPS (except localhost for development)
|
||||
2. **User consent**: Users explicitly approve each permission
|
||||
3. **Session-based**: Uses existing Unraid authentication session
|
||||
4. **One-time display**: API keys are shown once and must be saved securely
|
||||
|
||||
## Example Integration
|
||||
|
||||
```javascript
|
||||
// JavaScript example
|
||||
const unraidServer = 'tower.local';
|
||||
const appName = 'My Docker Manager';
|
||||
const scopes = 'docker:*,system:read';
|
||||
const redirectUri = 'https://myapp.com/unraid/callback';
|
||||
const state = generateRandomState();
|
||||
|
||||
// Store state for verification
|
||||
sessionStorage.setItem('oauth_state', state);
|
||||
|
||||
// Redirect user to authorization page
|
||||
window.location.href =
|
||||
`https://${unraidServer}/ApiKeyAuthorize?` +
|
||||
`name=${encodeURIComponent(appName)}&` +
|
||||
`scopes=${encodeURIComponent(scopes)}&` +
|
||||
`redirect_uri=${encodeURIComponent(redirectUri)}&` +
|
||||
`state=${encodeURIComponent(state)}`;
|
||||
|
||||
// Handle callback
|
||||
const urlParams = new URLSearchParams(window.location.search);
|
||||
const apiKey = urlParams.get('api_key');
|
||||
const returnedState = urlParams.get('state');
|
||||
|
||||
if (returnedState === sessionStorage.getItem('oauth_state')) {
|
||||
// Save API key securely
|
||||
saveApiKey(apiKey);
|
||||
}
|
||||
```
|
||||
@@ -1,5 +1,17 @@
|
||||
---
|
||||
title: CLI Reference
|
||||
description: Complete reference for all Unraid API CLI commands
|
||||
sidebar_position: 4
|
||||
---
|
||||
|
||||
# CLI Commands
|
||||
|
||||
:::info[Command Structure]
|
||||
All commands follow the pattern: `unraid-api <command> [options]`
|
||||
:::
|
||||
|
||||
## 🚀 Service Management
|
||||
|
||||
### Start
|
||||
|
||||
```bash
|
||||
@@ -9,7 +21,14 @@ unraid-api start [--log-level <level>]
|
||||
Starts the Unraid API service.
|
||||
|
||||
Options:
|
||||
- `--log-level`: Set logging level (trace|debug|info|warn|error)
|
||||
|
||||
- `--log-level`: Set logging level (trace|debug|info|warn|error|fatal)
|
||||
|
||||
Alternative: You can also set the log level using the `LOG_LEVEL` environment variable:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=trace unraid-api start
|
||||
```
|
||||
|
||||
### Stop
|
||||
|
||||
@@ -24,11 +43,21 @@ Stops the Unraid API service.
|
||||
### Restart
|
||||
|
||||
```bash
|
||||
unraid-api restart
|
||||
unraid-api restart [--log-level <level>]
|
||||
```
|
||||
|
||||
Restarts the Unraid API service.
|
||||
|
||||
Options:
|
||||
|
||||
- `--log-level`: Set logging level (trace|debug|info|warn|error|fatal)
|
||||
|
||||
Alternative: You can also set the log level using the `LOG_LEVEL` environment variable:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=trace unraid-api restart
|
||||
```
|
||||
|
||||
### Logs
|
||||
|
||||
```bash
|
||||
@@ -39,7 +68,7 @@ View the API logs.
|
||||
|
||||
- `-l, --lines`: Optional. Number of lines to tail (default: 100)
|
||||
|
||||
## Configuration Commands
|
||||
## ⚙️ Configuration Commands
|
||||
|
||||
### Config
|
||||
|
||||
@@ -61,6 +90,10 @@ Switch between production and staging environments.
|
||||
|
||||
### Developer Mode
|
||||
|
||||
:::tip Web GUI Management
|
||||
You can also manage developer options through the web interface at **Settings** → **Management Access** → **Developer Options**
|
||||
:::
|
||||
|
||||
```bash
|
||||
unraid-api developer # Interactive prompt for tools
|
||||
unraid-api developer --sandbox true # Enable GraphQL sandbox
|
||||
@@ -76,13 +109,17 @@ Configure developer features for the API:
|
||||
|
||||
## API Key Management
|
||||
|
||||
:::tip Web GUI Management
|
||||
You can also manage API keys through the web interface at **Settings** → **Management Access** → **API Keys**
|
||||
:::
|
||||
|
||||
### API Key Commands
|
||||
|
||||
```bash
|
||||
unraid-api apikey [options]
|
||||
```
|
||||
|
||||
Create and manage API keys.
|
||||
Create and manage API keys via CLI.
|
||||
|
||||
Options:
|
||||
|
||||
@@ -94,6 +131,10 @@ Options:
|
||||
|
||||
## SSO (Single Sign-On) Management
|
||||
|
||||
:::info OIDC Configuration
|
||||
For OIDC/SSO provider configuration, see the web interface at **Settings** → **Management Access** → **API** → **OIDC** or refer to the [OIDC Provider Setup](./oidc-provider-setup.md) guide.
|
||||
:::
|
||||
|
||||
### SSO Base Command
|
||||
|
||||
```bash
|
||||
|
||||
@@ -1,39 +1,75 @@
|
||||
---
|
||||
title: Using the Unraid API
|
||||
description: Learn how to interact with your Unraid server through the GraphQL API
|
||||
sidebar_position: 2
|
||||
---
|
||||
|
||||
# Using the Unraid API
|
||||
|
||||
:::tip[Quick Start]
|
||||
The Unraid API provides a powerful GraphQL interface for managing your server. This guide covers authentication, common queries, and best practices.
|
||||
:::
|
||||
|
||||
The Unraid API provides a GraphQL interface that allows you to interact with your Unraid server. This guide will help you get started with exploring and using the API.
|
||||
|
||||
## Enabling the GraphQL Sandbox
|
||||
## 🎮 Enabling the GraphQL Sandbox
|
||||
|
||||
1. Enable developer mode using the CLI:
|
||||
### Web GUI Method (Recommended)
|
||||
|
||||
```bash
|
||||
unraid-api developer --sandbox true
|
||||
```
|
||||
|
||||
Or use the interactive mode:
|
||||
|
||||
```bash
|
||||
unraid-api developer
|
||||
```
|
||||
|
||||
2. Once enabled, you can access the Apollo Sandbox interface
|
||||
:::info[Preferred Method]
|
||||
Using the Web GUI is the easiest way to enable the GraphQL sandbox.
|
||||
:::
|
||||
|
||||
1. Navigate to **Settings** → **Management Access** → **Developer Options**
|
||||
2. Enable the **GraphQL Sandbox** toggle
|
||||
3. Access the GraphQL playground by navigating to:
|
||||
|
||||
```txt
|
||||
http://YOUR_SERVER_IP/graphql
|
||||
```
|
||||
|
||||
## Authentication
|
||||
### CLI Method
|
||||
|
||||
Most queries and mutations require authentication. You can authenticate using either:
|
||||
Alternatively, you can enable developer mode using the CLI:
|
||||
|
||||
1. API Keys
|
||||
2. Cookies (default method when signed into the WebGUI)
|
||||
```bash
|
||||
unraid-api developer --sandbox true
|
||||
```
|
||||
|
||||
### Creating an API Key
|
||||
Or use the interactive mode:
|
||||
|
||||
Use the CLI to create an API key:
|
||||
```bash
|
||||
unraid-api developer
|
||||
```
|
||||
|
||||
## 🔑 Authentication
|
||||
|
||||
:::warning[Required for Most Operations]
|
||||
Most queries and mutations require authentication. Always include appropriate credentials in your requests.
|
||||
:::
|
||||
|
||||
You can authenticate using:
|
||||
|
||||
1. **API Keys** - For programmatic access
|
||||
2. **Cookies** - Automatic when signed into the WebGUI
|
||||
3. **SSO/OIDC** - When configured with external providers
|
||||
|
||||
### Managing API Keys
|
||||
|
||||
<tabs>
|
||||
<tabItem value="gui" label="Web GUI (Recommended)" default>
|
||||
|
||||
Navigate to **Settings** → **Management Access** → **API Keys** in your Unraid web interface to:
|
||||
|
||||
- View existing API keys
|
||||
- Create new API keys
|
||||
- Manage permissions and roles
|
||||
- Revoke or regenerate keys
|
||||
|
||||
</tabItem>
|
||||
<tabItem value="cli" label="CLI Method">
|
||||
|
||||
You can also use the CLI to create an API key:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create
|
||||
@@ -46,6 +82,11 @@ Follow the prompts to set:
|
||||
- Roles
|
||||
- Permissions
|
||||
|
||||
</tabItem>
|
||||
</tabs>
|
||||
|
||||
### Using API Keys
|
||||
|
||||
The generated API key should be included in your GraphQL requests as a header:
|
||||
|
||||
```json
|
||||
@@ -54,7 +95,7 @@ The generated API key should be included in your GraphQL requests as a header:
|
||||
}
|
||||
```
|
||||
|
||||
## Available Schemas
|
||||
## 📊 Available Schemas
|
||||
|
||||
The API provides access to various aspects of your Unraid server:
|
||||
|
||||
@@ -83,9 +124,9 @@ The API provides access to various aspects of your Unraid server:
|
||||
- Handle SSO configuration
|
||||
- Manage allowed origins
|
||||
|
||||
### Example Queries
|
||||
### 💻 Example Queries
|
||||
|
||||
1. Check System Status:
|
||||
#### Check System Status
|
||||
|
||||
```graphql
|
||||
query {
|
||||
@@ -106,7 +147,7 @@ query {
|
||||
}
|
||||
```
|
||||
|
||||
2. Monitor Array Status:
|
||||
#### Monitor Array Status
|
||||
|
||||
```graphql
|
||||
query {
|
||||
@@ -129,7 +170,7 @@ query {
|
||||
}
|
||||
```
|
||||
|
||||
3. List Docker Containers:
|
||||
#### List Docker Containers
|
||||
|
||||
```graphql
|
||||
query {
|
||||
@@ -143,7 +184,7 @@ query {
|
||||
}
|
||||
```
|
||||
|
||||
## Schema Types
|
||||
## 🏗️ Schema Types
|
||||
|
||||
The API includes several core types:
|
||||
|
||||
@@ -170,19 +211,23 @@ Available roles:
|
||||
- `connect`: Remote access features
|
||||
- `guest`: Limited read access
|
||||
|
||||
## Best Practices
|
||||
## ✨ Best Practices
|
||||
|
||||
:::tip[Pro Tips]
|
||||
1. Use the Apollo Sandbox to explore the schema and test queries
|
||||
2. Start with small queries and gradually add fields as needed
|
||||
3. Monitor your query complexity to maintain performance
|
||||
4. Use appropriate roles and permissions for your API keys
|
||||
5. Keep your API keys secure and rotate them periodically
|
||||
:::
|
||||
|
||||
## Rate Limiting
|
||||
## ⏱️ Rate Limiting
|
||||
|
||||
:::caution[Rate Limits]
|
||||
The API implements rate limiting to prevent abuse. Ensure your applications handle rate limit responses appropriately.
|
||||
:::
|
||||
|
||||
## Error Handling
|
||||
## 🚨 Error Handling
|
||||
|
||||
The API returns standard GraphQL errors in the following format:
|
||||
|
||||
@@ -198,11 +243,13 @@ The API returns standard GraphQL errors in the following format:
|
||||
}
|
||||
```
|
||||
|
||||
## Additional Resources
|
||||
## 📚 Additional Resources
|
||||
|
||||
:::info[Learn More]
|
||||
- Use the Apollo Sandbox's schema explorer to browse all available types and fields
|
||||
- Check the documentation tab in Apollo Sandbox for detailed field descriptions
|
||||
- Monitor the API's health using `unraid-api status`
|
||||
- Generate reports using `unraid-api report` for troubleshooting
|
||||
|
||||
For more information about specific commands and configuration options, refer to the CLI documentation or run `unraid-api --help`.
|
||||
For more information about specific commands and configuration options, refer to the [CLI documentation](/cli) or run `unraid-api --help`.
|
||||
:::
|
||||
|
||||
BIN
api/docs/public/images/advanced-rules.png
Normal file
BIN
api/docs/public/images/advanced-rules.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 101 KiB |
BIN
api/docs/public/images/button-customization.png
Normal file
BIN
api/docs/public/images/button-customization.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 96 KiB |
BIN
api/docs/public/images/configured-provider.png
Normal file
BIN
api/docs/public/images/configured-provider.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 85 KiB |
BIN
api/docs/public/images/default-unraid-provider.png
Normal file
BIN
api/docs/public/images/default-unraid-provider.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 128 KiB |
BIN
api/docs/public/images/sso-with-options.png
Normal file
BIN
api/docs/public/images/sso-with-options.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 75 KiB |
@@ -1,37 +1,94 @@
|
||||
# Unraid API
|
||||
---
|
||||
title: Welcome to Unraid API
|
||||
description: The official GraphQL API for Unraid Server management and automation
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
# Welcome to Unraid API
|
||||
|
||||
:::tip[What's New]
|
||||
Starting with Unraid OS v7.2, the API comes built into the operating system - no plugin installation required!
|
||||
:::
|
||||
|
||||
The Unraid API provides a GraphQL interface for programmatic interaction with your Unraid server. It enables automation, monitoring, and integration capabilities.
|
||||
|
||||
## Current Availability
|
||||
## 📦 Availability
|
||||
|
||||
The API is available through the Unraid Connect Plugin:
|
||||
### ✨ Native Integration (Unraid OS v7.2+)
|
||||
|
||||
1. Install Unraid Connect Plugin from Apps
|
||||
Starting with Unraid OS v7.2, the API is integrated directly into the operating system:
|
||||
|
||||
- No plugin installation required
|
||||
- Automatically available on system startup
|
||||
- Deep system integration
|
||||
- Access through **Settings** → **Management Access** → **API**
|
||||
|
||||
### 🔌 Plugin Installation (Pre-7.2 and Advanced Users)
|
||||
|
||||
For Unraid versions prior to v7.2 or to access newer API features:
|
||||
|
||||
1. Install the Unraid Connect Plugin from Community Apps
|
||||
2. [Configure the plugin](./how-to-use-the-api.md#enabling-the-graphql-sandbox)
|
||||
3. Access API functionality through the [GraphQL Sandbox](./how-to-use-the-api.md#accessing-the-graphql-sandbox)
|
||||
3. Access API functionality through the [GraphQL Sandbox](./how-to-use-the-api.md)
|
||||
|
||||
## Future Availability
|
||||
:::info[Important Notes]
|
||||
- The Unraid Connect plugin provides the API for pre-7.2 versions
|
||||
- You do NOT need to sign in to Unraid Connect to use the API locally
|
||||
- Installing the plugin on 7.2+ gives you access to newer API features before they're included in OS releases
|
||||
:::
|
||||
|
||||
The API will be integrated directly into the Unraid operating system in an upcoming OS release. This integration will:
|
||||
## 📚 Documentation Sections
|
||||
|
||||
- Make the API a core part of the Unraid system
|
||||
- Remove the need for separate plugin installation
|
||||
- Enable deeper system integration capabilities
|
||||
<cards>
|
||||
<card title="CLI Commands" icon="terminal" href="./cli">
|
||||
Complete reference for all CLI commands
|
||||
</card>
|
||||
<card title="Using the API" icon="code" href="./how-to-use-the-api">
|
||||
Learn how to interact with the GraphQL API
|
||||
</card>
|
||||
<card title="OIDC Setup" icon="shield" href="./oidc-provider-setup">
|
||||
Configure SSO authentication providers
|
||||
</card>
|
||||
<card title="Upcoming Features" icon="rocket" href="./upcoming-features">
|
||||
See what's coming next
|
||||
</card>
|
||||
</cards>
|
||||
|
||||
## Documentation Sections
|
||||
|
||||
- [CLI Commands](./cli.md) - Reference for all available command-line interface commands
|
||||
- [Using the Unraid API](./how-to-use-the-api.md) - Comprehensive guide on using the GraphQL API
|
||||
- [Upcoming Features](./upcoming-features.md) - Roadmap of planned features and improvements
|
||||
|
||||
## Key Features
|
||||
## 🌟 Key Features
|
||||
|
||||
:::info[Core Capabilities]
|
||||
The API provides:
|
||||
|
||||
- GraphQL Interface: Modern, flexible API with strong typing
|
||||
- Authentication: Secure access via API keys or session cookies
|
||||
- Comprehensive Coverage: Access to system information, array management, and Docker operations
|
||||
- Developer Tools: Built-in GraphQL sandbox for testing
|
||||
- Role-Based Access: Granular permission control
|
||||
- **GraphQL Interface**: Modern, flexible API with strong typing
|
||||
- **Authentication**: Multiple methods including API keys, session cookies, and SSO/OIDC
|
||||
- **Comprehensive Coverage**: Access to system information, array management, and Docker operations
|
||||
- **Developer Tools**: Built-in GraphQL sandbox configurable via web interface or CLI
|
||||
- **Role-Based Access**: Granular permission control
|
||||
- **Web Management**: Manage API keys and settings through the web interface
|
||||
:::
|
||||
|
||||
For detailed usage instructions, see [CLI Commands](./cli.md).
|
||||
## 🚀 Get Started
|
||||
|
||||
<tabs>
|
||||
<tabItem value="v72" label="Unraid OS v7.2+" default>
|
||||
|
||||
1. The API is already installed and running
|
||||
2. Access settings at **Settings** → **Management Access** → **API**
|
||||
3. Enable the GraphQL Sandbox for development
|
||||
4. Create your first API key
|
||||
5. Start making GraphQL queries!
|
||||
|
||||
</tabItem>
|
||||
<tabItem value="older" label="Pre-7.2 Versions">
|
||||
|
||||
1. Install the Unraid Connect plugin from Community Apps
|
||||
2. No Unraid Connect login required for local API access
|
||||
3. Configure the plugin settings
|
||||
4. Enable the GraphQL Sandbox
|
||||
5. Start exploring the API!
|
||||
|
||||
</tabItem>
|
||||
</tabs>
|
||||
|
||||
For detailed usage instructions, see the [CLI Commands](./cli) reference.
|
||||
|
||||
420
api/docs/public/oidc-provider-setup.md
Normal file
420
api/docs/public/oidc-provider-setup.md
Normal file
@@ -0,0 +1,420 @@
|
||||
---
|
||||
title: OIDC Provider Setup
|
||||
description: Configure OIDC (OpenID Connect) providers for SSO authentication in Unraid API
|
||||
sidebar_position: 3
|
||||
---
|
||||
|
||||
# OIDC Provider Setup
|
||||
|
||||
:::info[What is OIDC?]
|
||||
OpenID Connect (OIDC) is an authentication protocol that allows users to sign in using their existing accounts from providers like Google, Microsoft, or your corporate identity provider. It enables Single Sign-On (SSO) for seamless and secure authentication.
|
||||
:::
|
||||
|
||||
This guide walks you through configuring OIDC (OpenID Connect) providers for SSO authentication in the Unraid API using the web interface.
|
||||
|
||||
## 🚀 Quick Start
|
||||
|
||||
<details open>
|
||||
<summary><strong>Getting to OIDC Settings</strong></summary>
|
||||
|
||||
1. Navigate to your Unraid server's web interface
|
||||
2. Go to **Settings** → **Management Access** → **API** → **OIDC**
|
||||
3. You'll see tabs for different providers - click the **+** button to add a new provider
|
||||
|
||||
</details>
|
||||
|
||||
### OIDC Providers Interface Overview
|
||||
|
||||

|
||||
*Login page showing traditional login form with SSO options - "Login With Unraid.net" and "Sign in with Google" buttons*
|
||||
|
||||
The interface includes:
|
||||
|
||||
- **Provider tabs**: Each configured provider (Unraid.net, Google, etc.) appears as a tab
|
||||
- **Add Provider button**: Click the **+** button to add new providers
|
||||
- **Authorization Mode dropdown**: Toggle between "simple" and "advanced" modes
|
||||
- **Simple Authorization section**: Configure allowed email domains and specific addresses
|
||||
- **Add Item buttons**: Click to add multiple authorization rules
|
||||
|
||||
## Understanding Authorization Modes
|
||||
|
||||
The interface provides two authorization modes:
|
||||
|
||||
### Simple Mode (Recommended)
|
||||
|
||||
Simple mode is the easiest way to configure authorization. You can:
|
||||
|
||||
- Allow specific email domains (e.g., @company.com)
|
||||
- Allow specific email addresses
|
||||
- Configure who can access your Unraid server with minimal setup
|
||||
|
||||
**When to use Simple Mode:**
|
||||
|
||||
- You want to allow all users from your company domain
|
||||
- You have a small list of specific users
|
||||
- You're new to OIDC configuration
|
||||
|
||||
<details>
|
||||
<summary><strong>Advanced Mode</strong></summary>
|
||||
|
||||
Advanced mode provides granular control using claim-based rules. You can:
|
||||
|
||||
- Create complex authorization rules based on JWT claims
|
||||
- Use operators like equals, contains, endsWith, startsWith
|
||||
- Combine multiple conditions with OR/AND logic
|
||||
- Choose whether ANY rule must pass (OR mode) or ALL rules must pass (AND mode)
|
||||
|
||||
**When to use Advanced Mode:**
|
||||
|
||||
- You need to check group memberships
|
||||
- You want to verify multiple claims (e.g., email domain AND verified status)
|
||||
- You have complex authorization requirements
|
||||
- You need fine-grained control over how rules are evaluated
|
||||
|
||||
</details>
|
||||
|
||||
## Authorization Rules
|
||||
|
||||

|
||||
*Advanced authorization rules showing JWT claim configuration with email endsWith operator for domain-based access control*
|
||||
|
||||
### Simple Mode Examples
|
||||
|
||||
#### Allow Company Domain
|
||||
|
||||
In Simple Authorization:
|
||||
|
||||
- **Allowed Email Domains**: Enter `company.com`
|
||||
- This allows anyone with @company.com email
|
||||
|
||||
#### Allow Specific Users
|
||||
|
||||
- **Specific Email Addresses**: Add individual emails
|
||||
- Click **Add Item** to add multiple addresses
|
||||
|
||||
<details>
|
||||
<summary><strong>Advanced Mode Examples</strong></summary>
|
||||
|
||||
#### Authorization Rule Mode
|
||||
|
||||
When using multiple rules, you can choose how they're evaluated:
|
||||
|
||||
- **OR Mode** (default): User is authorized if ANY rule passes
|
||||
- **AND Mode**: User is authorized only if ALL rules pass
|
||||
|
||||
#### Email Domain with Verification (AND Mode)
|
||||
|
||||
To require both email domain AND verification:
|
||||
|
||||
1. Set **Authorization Rule Mode** to `AND`
|
||||
2. Add two rules:
|
||||
- Rule 1:
|
||||
- **Claim**: `email`
|
||||
- **Operator**: `endsWith`
|
||||
- **Value**: `@company.com`
|
||||
- Rule 2:
|
||||
- **Claim**: `email_verified`
|
||||
- **Operator**: `equals`
|
||||
- **Value**: `true`
|
||||
|
||||
This ensures users must have both a company email AND a verified email address.
|
||||
|
||||
#### Group-Based Access (OR Mode)
|
||||
|
||||
To allow access to multiple groups:
|
||||
|
||||
1. Set **Authorization Rule Mode** to `OR` (default)
|
||||
2. Add rules for each group:
|
||||
- **Claim**: `groups`
|
||||
- **Operator**: `contains`
|
||||
- **Value**: `admins`
|
||||
|
||||
Or add another rule:
|
||||
- **Claim**: `groups`
|
||||
- **Operator**: `contains`
|
||||
- **Value**: `developers`
|
||||
|
||||
Users in either `admins` OR `developers` group will be authorized.
|
||||
|
||||
#### Multiple Domains
|
||||
|
||||
- **Claim**: `email`
|
||||
- **Operator**: `endsWith`
|
||||
- **Values**: Add multiple domains (e.g., `company.com`, `subsidiary.com`)
|
||||
|
||||
#### Complex Authorization (AND Mode)
|
||||
|
||||
For strict security requiring multiple conditions:
|
||||
|
||||
1. Set **Authorization Rule Mode** to `AND`
|
||||
2. Add multiple rules that ALL must pass:
|
||||
- Email must be from company domain
|
||||
- Email must be verified
|
||||
- User must be in specific group
|
||||
- Account must have 2FA enabled (if claim available)
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Configuration Interface Details</strong></summary>
|
||||
|
||||
### Provider Tabs
|
||||
|
||||
- Each configured provider appears as a tab at the top
|
||||
- Click a tab to switch between provider configurations
|
||||
- The **+** button on the right adds a new provider
|
||||
|
||||
### Authorization Mode Dropdown
|
||||
|
||||
- **simple**: Best for email-based authorization (recommended for most users)
|
||||
- **advanced**: For complex claim-based rules using JWT claims
|
||||
|
||||
### Simple Authorization Fields
|
||||
|
||||
When "simple" mode is selected, you'll see:
|
||||
|
||||
- **Allowed Email Domains**: Enter domains without @ (e.g., `company.com`)
|
||||
- Helper text: "Users with emails ending in these domains can login"
|
||||
- **Specific Email Addresses**: Add individual email addresses
|
||||
- Helper text: "Only these exact email addresses can login"
|
||||
- **Add Item** buttons to add multiple entries
|
||||
|
||||
### Advanced Authorization Fields
|
||||
|
||||
When "advanced" mode is selected, you'll see:
|
||||
|
||||
- **Authorization Rule Mode**: Choose `OR` (any rule passes) or `AND` (all rules must pass)
|
||||
- **Authorization Rules**: Add multiple claim-based rules
|
||||
- **For each rule**:
|
||||
- **Claim**: The JWT claim to check
|
||||
- **Operator**: How to compare (equals, contains, endsWith, startsWith)
|
||||
- **Value**: What to match against
|
||||
|
||||
### Additional Interface Elements
|
||||
|
||||
- **Enable Developer Sandbox**: Toggle to enable GraphQL sandbox at `/graphql`
|
||||
- The interface uses a dark theme for better visibility
|
||||
- Field validation indicators help ensure correct configuration
|
||||
|
||||
</details>
|
||||
|
||||
### Required Redirect URI
|
||||
|
||||
:::caution[Important Configuration]
|
||||
All providers must be configured with this exact redirect URI format:
|
||||
:::
|
||||
|
||||
```bash
|
||||
http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback
|
||||
```
|
||||
|
||||
:::tip
|
||||
Replace `YOUR_UNRAID_IP` with your actual server IP address (e.g., `192.168.1.100` or `tower.local`).
|
||||
:::
|
||||
|
||||
### Issuer URL Format
|
||||
|
||||
The **Issuer URL** field accepts both formats, but **base URL is strongly recommended** for security:
|
||||
|
||||
- **Base URL** (recommended): `https://accounts.google.com`
|
||||
- **Full discovery URL**: `https://accounts.google.com/.well-known/openid-configuration`
|
||||
|
||||
**⚠️ Security Note**: Always use the base URL format when possible. The system automatically appends `/.well-known/openid-configuration` for OIDC discovery. Using the full discovery URL directly disables important issuer validation checks and is not recommended by the OpenID Connect specification.
|
||||
|
||||
**Examples of correct base URLs:**
|
||||
- Google: `https://accounts.google.com`
|
||||
- Microsoft/Azure: `https://login.microsoftonline.com/YOUR_TENANT_ID/v2.0`
|
||||
- Keycloak: `https://keycloak.example.com/realms/YOUR_REALM`
|
||||
- Authelia: `https://auth.yourdomain.com`
|
||||
|
||||
## ✅ Testing Your Configuration
|
||||
|
||||

|
||||
*Unraid login page displaying both traditional username/password authentication and SSO options with customized provider buttons*
|
||||
|
||||
1. Save your provider configuration
|
||||
2. Log out (if logged in)
|
||||
3. Navigate to the login page
|
||||
4. Your configured provider button should appear
|
||||
5. Click to test the login flow
|
||||
|
||||
## 🔧 Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
#### "Provider not found" error
|
||||
|
||||
- Ensure the Issuer URL is correct
|
||||
- Check that the provider supports OIDC discovery (`/.well-known/openid-configuration`)
|
||||
|
||||
#### "Authorization failed"
|
||||
|
||||
- In Simple Mode: Check email domains are entered correctly (without @)
|
||||
- In Advanced Mode:
|
||||
- Verify claim names match exactly what your provider sends
|
||||
- Check if Authorization Rule Mode is set correctly (OR vs AND)
|
||||
- Ensure all required claims are present in the token
|
||||
- Enable debug logging to see actual claims and rule evaluation
|
||||
|
||||
#### "Invalid redirect URI"
|
||||
|
||||
- Ensure the redirect URI in your provider matches exactly
|
||||
- Include the correct port if using a non-standard configuration
|
||||
- Verify the redirect URI protocol matches your server's configuration (HTTP or HTTPS)
|
||||
|
||||
#### Cannot see login button
|
||||
|
||||
- Check that at least one authorization rule is configured
|
||||
- Verify the provider is enabled/saved
|
||||
|
||||
### Debug Mode
|
||||
|
||||
To troubleshoot issues:
|
||||
|
||||
1. Enable debug logging:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=debug unraid-api start --debug
|
||||
```
|
||||
|
||||
2. Check logs for:
|
||||
|
||||
- Received claims from provider
|
||||
- Authorization rule evaluation
|
||||
- Token validation errors
|
||||
|
||||
## 🔐 Security Best Practices
|
||||
|
||||
1. **Use Simple Mode for authorization** - Prevents overly accepting configurations and reduces misconfiguration risks
|
||||
2. **Be specific with authorization** - Don't use overly broad rules
|
||||
3. **Rotate secrets regularly** - Update client secrets periodically
|
||||
4. **Test thoroughly** - Verify only intended users can access
|
||||
|
||||
## 💡 Need Help?
|
||||
|
||||
- Check provider's OIDC documentation
|
||||
- Review Unraid API logs for detailed error messages
|
||||
- Ensure your provider supports standard OIDC discovery
|
||||
- Verify network connectivity between Unraid and provider
|
||||
|
||||
## 🏢 Provider-Specific Setup
|
||||
|
||||
### Unraid.net Provider
|
||||
|
||||
The Unraid.net provider is built-in and pre-configured. You only need to configure authorization rules in the interface.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: Pre-configured (built-in provider)
|
||||
- **Client ID/Secret**: Pre-configured (built-in provider)
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
:::tip[Redirect URI Protocol]
|
||||
**Match the protocol to your server setup:** Use `http://` if accessing your Unraid server without SSL/TLS (typical for local network access). Use `https://` if you've configured SSL/TLS on your server. Some OIDC providers (like Google) require HTTPS and won't accept HTTP redirect URIs.
|
||||
:::
|
||||
|
||||
Configure authorization rules using Simple Mode (allowed email domains/addresses) or Advanced Mode for complex requirements.
|
||||
|
||||
### Google
|
||||
|
||||
<details>
|
||||
<summary><strong>📋 Setup Steps</strong></summary>
|
||||
|
||||
Set up OAuth 2.0 credentials in [Google Cloud Console](https://console.cloud.google.com/):
|
||||
|
||||
1. Go to **APIs & Services** → **Credentials**
|
||||
2. Click **Create Credentials** → **OAuth client ID**
|
||||
3. Choose **Web application** as the application type
|
||||
4. Add your redirect URI to **Authorized redirect URIs**
|
||||
5. Configure the OAuth consent screen if prompted
|
||||
|
||||
</details>
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://accounts.google.com`
|
||||
- **Client ID/Secret**: From your OAuth 2.0 client credentials
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
:::warning[Google Domain Requirements]
|
||||
**Google requires valid domain names for OAuth redirect URIs.** Local IP addresses and `.local` domains are not accepted. To use Google OAuth with your Unraid server, you'll need:
|
||||
|
||||
- **Option 1: Reverse Proxy** - Set up a reverse proxy (like NGINX Proxy Manager or Traefik) with a valid domain name pointing to your Unraid API
|
||||
- **Option 2: Tailscale** - Use Tailscale to get a valid `*.ts.net` domain that Google will accept
|
||||
- **Option 3: Dynamic DNS** - Use a DDNS service to get a public domain name for your server
|
||||
|
||||
Remember to update your redirect URI in both Google Cloud Console and your Unraid OIDC configuration to use the valid domain.
|
||||
:::
|
||||
|
||||
For Google Workspace domains, use Advanced Mode with the `hd` claim to restrict access to your organization's domain.
|
||||
|
||||
### Authelia
|
||||
|
||||
Configure OIDC client in your Authelia `configuration.yml` with client ID `unraid-api` and generate a hashed secret using the Authelia hash-password command.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://auth.yourdomain.com`
|
||||
- **Client ID**: `unraid-api` (or as configured in Authelia)
|
||||
- **Client Secret**: The plaintext (unhashed) secret — Authelia's configuration stores the hashed value, but Unraid must be given the original unhashed secret
|
||||
- **Required Scopes**: `openid`, `profile`, `email`, `groups`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
Use Advanced Mode with `groups` claim for group-based authorization.
|
||||
|
||||
### Microsoft/Azure AD
|
||||
|
||||
Register a new app in [Azure Portal](https://portal.azure.com/) under Azure Active Directory → App registrations. Note the Application ID, create a client secret, and note your tenant ID.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://login.microsoftonline.com/YOUR_TENANT_ID/v2.0`
|
||||
- **Client ID**: Your Application (client) ID
|
||||
- **Client Secret**: Generated client secret
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
Authorization rules can be configured in the interface using email domains or advanced claims.
|
||||
|
||||
### Keycloak
|
||||
|
||||
Create a new confidential client in Keycloak Admin Console with `openid-connect` protocol and copy the client secret from the Credentials tab.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://keycloak.example.com/realms/YOUR_REALM`
|
||||
- **Client ID**: `unraid-api` (or as configured in Keycloak)
|
||||
- **Client Secret**: From Keycloak Credentials tab
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
For role-based authorization, use Advanced Mode with `realm_access.roles` or `resource_access` claims.
|
||||
|
||||
### Authentik
|
||||
|
||||
Create a new OAuth2/OpenID Provider in Authentik, then create an Application and link it to the provider.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://authentik.example.com/application/o/<application_slug>/`
|
||||
- **Client ID**: From Authentik provider configuration
|
||||
- **Client Secret**: From Authentik provider configuration
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
Authorization rules can be configured in the interface.
|
||||
|
||||
### Okta
|
||||
|
||||
Create a new OIDC Web Application in Okta Admin Console and assign appropriate users or groups.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://YOUR_DOMAIN.okta.com`
|
||||
- **Client ID**: From Okta application configuration
|
||||
- **Client Secret**: From Okta application configuration
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
Authorization rules can be configured in the interface using email domains or advanced claims.
|
||||
252
api/docs/public/programmatic-api-key-management.md
Normal file
252
api/docs/public/programmatic-api-key-management.md
Normal file
@@ -0,0 +1,252 @@
|
||||
---
|
||||
title: Programmatic API Key Management
|
||||
description: Create, use, and delete API keys programmatically for automated workflows
|
||||
sidebar_position: 4
|
||||
---
|
||||
|
||||
# Programmatic API Key Management
|
||||
|
||||
This guide explains how to create, use, and delete API keys programmatically using the Unraid API CLI, enabling automated workflows and scripts.
|
||||
|
||||
## Overview
|
||||
|
||||
The `unraid-api apikey` command supports both interactive and non-interactive modes, making it suitable for:
|
||||
|
||||
- Automated deployment scripts
|
||||
- CI/CD pipelines
|
||||
- Temporary access provisioning
|
||||
- Infrastructure as code workflows
|
||||
|
||||
:::tip[Quick Start]
|
||||
Jump to the [Complete Workflow Example](#complete-workflow-example) to see everything in action.
|
||||
:::
|
||||
|
||||
## Creating API Keys Programmatically
|
||||
|
||||
### Basic Creation with JSON Output
|
||||
|
||||
Use the `--json` flag to get machine-readable output:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create --name "workflow key" --roles ADMIN --json
|
||||
```
|
||||
|
||||
**Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"key": "your-generated-api-key-here",
|
||||
"name": "workflow key",
|
||||
"id": "generated-uuid"
|
||||
}
|
||||
```
|
||||
|
||||
### Advanced Creation with Permissions
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create \
|
||||
--name "limited access key" \
|
||||
--permissions "DOCKER:READ_ANY,ARRAY:READ_ANY" \
|
||||
--description "Read-only access for monitoring" \
|
||||
--json
|
||||
```
|
||||
|
||||
### Handling Existing Keys
|
||||
|
||||
If a key with the same name exists, use `--overwrite`:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create --name "existing key" --roles ADMIN --overwrite --json
|
||||
```
|
||||
|
||||
:::warning[Key Replacement]
|
||||
The `--overwrite` flag will permanently replace the existing key. The old key will be immediately invalidated.
|
||||
:::
|
||||
|
||||
## Deleting API Keys Programmatically
|
||||
|
||||
### Non-Interactive Deletion
|
||||
|
||||
Delete a key by name without prompts:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --delete --name "workflow key"
|
||||
```
|
||||
|
||||
**Output:**
|
||||
|
||||
```
|
||||
Successfully deleted 1 API key
|
||||
```
|
||||
|
||||
### JSON Output for Deletion
|
||||
|
||||
Use `--json` flag for machine-readable delete confirmation:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --delete --name "workflow key" --json
|
||||
```
|
||||
|
||||
**Success Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 1,
|
||||
"keys": [
|
||||
{
|
||||
"id": "generated-uuid",
|
||||
"name": "workflow key"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**Error Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 0,
|
||||
"error": "No API key found with name: nonexistent key"
|
||||
}
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
When the specified key doesn't exist:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --delete --name "nonexistent key"
|
||||
# Output: No API keys found to delete
|
||||
```
|
||||
|
||||
**JSON Error Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 0,
|
||||
"message": "No API keys found to delete"
|
||||
}
|
||||
```
|
||||
|
||||
## Complete Workflow Example
|
||||
|
||||
Here's a complete example for temporary access provisioning:
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# 1. Create temporary API key
|
||||
echo "Creating temporary API key..."
|
||||
KEY_DATA=$(unraid-api apikey --create \
|
||||
--name "temp deployment key" \
|
||||
--roles ADMIN \
|
||||
--description "Temporary key for deployment $(date)" \
|
||||
--json)
|
||||
|
||||
# 2. Extract the API key
|
||||
API_KEY=$(echo "$KEY_DATA" | jq -r '.key')
|
||||
echo "API key created successfully"
|
||||
|
||||
# 3. Use the key for operations
|
||||
echo "Configuring services..."
|
||||
curl -H "Authorization: Bearer $API_KEY" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"provider": "azure", "clientId": "your-client-id"}' \
|
||||
http://localhost:3001/graphql
|
||||
|
||||
# 4. Register cleanup as an EXIT trap
#    Note: for the cleanup to run even when an earlier step fails under
#    `set -e`, register this trap right after the key is created (step 1)
#    rather than at the end of the script.
trap 'echo "Cleaning up..."; unraid-api apikey --delete --name "temp deployment key"' EXIT
|
||||
|
||||
echo "Deployment completed successfully"
|
||||
```
|
||||
|
||||
## Command Reference
|
||||
|
||||
### Create Command Options
|
||||
|
||||
| Flag | Description | Example |
|
||||
| ----------------------- | ----------------------- | --------------------------------- |
|
||||
| `--name <name>` | Key name (required) | `--name "my key"` |
|
||||
| `--roles <roles>` | Comma-separated roles | `--roles ADMIN,VIEWER` |
|
||||
| `--permissions <perms>` | Resource:action pairs | `--permissions "DOCKER:READ_ANY"` |
|
||||
| `--description <desc>` | Key description | `--description "CI/CD key"` |
|
||||
| `--overwrite` | Replace existing key | `--overwrite` |
|
||||
| `--json` | Machine-readable output | `--json` |
|
||||
|
||||
### Available Roles
|
||||
|
||||
- `ADMIN` - Full system access
|
||||
- `CONNECT` - Unraid Connect features
|
||||
- `VIEWER` - Read-only access
|
||||
- `GUEST` - Limited access
|
||||
|
||||
### Available Resources and Actions
|
||||
|
||||
**Resources:** `ACTIVATION_CODE`, `API_KEY`, `ARRAY`, `CLOUD`, `CONFIG`, `CONNECT`, `CONNECT__REMOTE_ACCESS`, `CUSTOMIZATIONS`, `DASHBOARD`, `DISK`, `DISPLAY`, `DOCKER`, `FLASH`, `INFO`, `LOGS`, `ME`, `NETWORK`, `NOTIFICATIONS`, `ONLINE`, `OS`, `OWNER`, `PERMISSION`, `REGISTRATION`, `SERVERS`, `SERVICES`, `SHARE`, `VARS`, `VMS`, `WELCOME`
|
||||
|
||||
**Actions:** `CREATE_ANY`, `CREATE_OWN`, `READ_ANY`, `READ_OWN`, `UPDATE_ANY`, `UPDATE_OWN`, `DELETE_ANY`, `DELETE_OWN`
|
||||
|
||||
### Delete Command Options
|
||||
|
||||
| Flag | Description | Example |
|
||||
| --------------- | ------------------------ | ----------------- |
|
||||
| `--delete` | Enable delete mode | `--delete` |
|
||||
| `--name <name>` | Key to delete (optional) | `--name "my key"` |
|
||||
|
||||
**Note:** If `--name` is omitted, the command runs interactively.
|
||||
|
||||
## Best Practices
|
||||
|
||||
:::info[Security Best Practices]
|
||||
**Minimal Permissions**
|
||||
|
||||
- Use specific permissions instead of ADMIN role when possible
|
||||
- Example: `--permissions "DOCKER:READ_ANY"` instead of `--roles ADMIN`
|
||||
|
||||
**Key Lifecycle Management**
|
||||
|
||||
- Always clean up temporary keys after use
|
||||
- Store API keys securely (environment variables, secrets management)
|
||||
- Use descriptive names and descriptions for audit trails
|
||||
:::
|
||||
|
||||
### Error Handling
|
||||
|
||||
- Check exit codes (`$?`) after each command
|
||||
- Use `set -e` in bash scripts to fail fast
|
||||
- Implement proper cleanup with `trap`
|
||||
|
||||
### Key Naming
|
||||
|
||||
- Use descriptive names that include purpose and date
|
||||
- Names must contain only letters, numbers, and spaces
|
||||
- Unicode letters are supported
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
:::note[Common Error Messages]
|
||||
|
||||
**"API key name must contain only letters, numbers, and spaces"**
|
||||
|
||||
- **Solution:** Remove special characters like hyphens, underscores, or symbols
|
||||
|
||||
**"API key with name 'x' already exists"**
|
||||
|
||||
- **Solution:** Use `--overwrite` flag or choose a different name
|
||||
|
||||
**"Please add at least one role or permission to the key"**
|
||||
|
||||
- **Solution:** Specify either `--roles` or `--permissions` (or both)
|
||||
|
||||
:::
|
||||
|
||||
### Debug Mode
|
||||
|
||||
For troubleshooting, run with debug logging:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=debug unraid-api apikey --create --name "debug key" --roles ADMIN
|
||||
```
|
||||
@@ -1,71 +1,172 @@
|
||||
# Upcoming Features
|
||||
---
|
||||
title: Roadmap & Features
|
||||
description: Current status and upcoming features for the Unraid API
|
||||
sidebar_position: 10
|
||||
---
|
||||
|
||||
Note: This roadmap outlines planned features and improvements for the Unraid API. Features and timelines may change based on development priorities and community feedback.
|
||||
# Roadmap & Features
|
||||
|
||||
:::info Development Status
|
||||
This roadmap outlines completed and planned features for the Unraid API. Features and timelines may change based on development priorities and community feedback.
|
||||
:::
|
||||
|
||||
## Feature Status Legend
|
||||
|
||||
| Status | Description |
|
||||
|--------|-------------|
|
||||
| ✅ **Done** | Feature is complete and available |
|
||||
| 🚧 **In Progress** | Currently under active development |
|
||||
| 📅 **Planned** | Scheduled for future development |
|
||||
| 💡 **Under Consideration** | Being evaluated for future inclusion |
|
||||
|
||||
## Core Infrastructure
|
||||
|
||||
| Feature | Status | Tag |
|
||||
|---------|--------|-----|
|
||||
| API Development Environment Improvements | Done | v4.0.0 |
|
||||
| Include API in Unraid OS | Planned (Q1 2025) | - |
|
||||
| Make API Open Source | Planned (Q1 2025) | - |
|
||||
| Separate API from Connect Plugin | Planned (Q2 2025) | - |
|
||||
| Developer Tools for Plugins | Planned (Q2 2025) | - |
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **API Development Environment Improvements** | v4.0.0 |
|
||||
| **Include API in Unraid OS** | Unraid v7.2-beta.1 |
|
||||
| **Separate API from Connect Plugin** | Unraid v7.2-beta.1 |
|
||||
|
||||
### Upcoming Features 📅
|
||||
|
||||
| Feature | Target Timeline |
|
||||
|---------|-----------------|
|
||||
| **Make API Open Source** | Q1 2025 |
|
||||
| **Developer Tools for Plugins** | Q2 2025 |
|
||||
|
||||
## Security & Authentication
|
||||
|
||||
| Feature | Status | Tag |
|
||||
|---------|--------|-----|
|
||||
| Permissions System Rewrite | Done | v4.0.0 |
|
||||
| User Interface Component Library | In Progress | - |
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **Permissions System Rewrite** | v4.0.0 |
|
||||
| **OIDC/SSO Support** | Unraid v7.2-beta.1 |
|
||||
|
||||
### In Development 🚧
|
||||
|
||||
- **User Interface Component Library** - Enhanced security components for the UI
|
||||
|
||||
## User Interface Improvements
|
||||
|
||||
| Feature | Status | Tag |
|
||||
|---------|--------|-----|
|
||||
| New Settings Pages | Planned (Q2 2025) | - |
|
||||
| Custom Theme Creator | Planned (Q2-Q3 2025) | - |
|
||||
| New Connect Settings Interface | Planned (Q1 2025) | - |
|
||||
### Planned Features 📅
|
||||
|
||||
| Feature | Target Timeline | Description |
|
||||
|---------|-----------------|-------------|
|
||||
| **New Settings Pages** | Q2 2025 | Modernized settings interface with improved UX |
|
||||
| **Custom Theme Creator** | Q2-Q3 2025 | Allow users to create and share custom themes |
|
||||
| **New Connect Settings Interface** | Q1 2025 | Redesigned Unraid Connect configuration |
|
||||
|
||||
## Array Management
|
||||
|
||||
| Feature | Status | Tag |
|
||||
|---------|--------|-----|
|
||||
| Array Status Monitoring | Done | v4.0.0 |
|
||||
| Storage Pool Creation Interface | Planned (Q2 2025) | - |
|
||||
| Storage Pool Status Interface | Planned (Q2 2025) | - |
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **Array Status Monitoring** | v4.0.0 |
|
||||
|
||||
### Planned Features 📅
|
||||
|
||||
| Feature | Target Timeline | Description |
|
||||
|---------|-----------------|-------------|
|
||||
| **Storage Pool Creation Interface** | Q2 2025 | Simplified pool creation workflow |
|
||||
| **Storage Pool Status Interface** | Q2 2025 | Real-time pool health monitoring |
|
||||
|
||||
## Docker Integration
|
||||
|
||||
| Feature | Status | Tag |
|
||||
|---------|--------|-----|
|
||||
| Docker Container Status Monitoring | Done | v4.0.0 |
|
||||
| New Docker Status Interface Design | Planned (Q3 2025) | - |
|
||||
| New Docker Status Interface | Planned (Q3 2025) | - |
|
||||
| Docker Container Setup Interface | Planned (Q3 2025) | - |
|
||||
| Docker Compose Support | Planned | - |
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **Docker Container Status Monitoring** | v4.0.0 |
|
||||
|
||||
### Planned Features 📅
|
||||
|
||||
| Feature | Target Timeline | Description |
|
||||
|---------|-----------------|-------------|
|
||||
| **New Docker Status Interface Design** | Q3 2025 | Modern container management UI |
|
||||
| **New Docker Status Interface** | Q3 2025 | Implementation of new design |
|
||||
| **Docker Container Setup Interface** | Q3 2025 | Streamlined container deployment |
|
||||
| **Docker Compose Support** | TBD | Native docker-compose.yml support |
|
||||
|
||||
## Share Management
|
||||
|
||||
| Feature | Status | Tag |
|
||||
|---------|--------|-----|
|
||||
| Array/Cache Share Status Monitoring | Done | v4.0.0 |
|
||||
| Storage Share Creation & Settings | Planned | - |
|
||||
| Storage Share Management Interface | Planned | - |
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **Array/Cache Share Status Monitoring** | v4.0.0 |
|
||||
|
||||
### Under Consideration 💡
|
||||
|
||||
- **Storage Share Creation & Settings** - Enhanced share configuration options
|
||||
- **Storage Share Management Interface** - Unified share management dashboard
|
||||
|
||||
## Plugin System
|
||||
|
||||
| Feature | Status | Tag |
|
||||
|---------|--------|-----|
|
||||
| New Plugins Interface | Planned (Q3 2025) | - |
|
||||
| Plugin Management Interface | Planned | - |
|
||||
| Plugin Development Tools | Planned | - |
|
||||
### Planned Features 📅
|
||||
|
||||
| Feature | Target Timeline | Description |
|
||||
|---------|-----------------|-------------|
|
||||
| **New Plugins Interface** | Q3 2025 | Redesigned plugin management UI |
|
||||
| **Plugin Management Interface** | TBD | Advanced plugin configuration |
|
||||
| **Plugin Development Tools** | TBD | SDK and tooling for developers |
|
||||
|
||||
## Notifications
|
||||
|
||||
| Feature | Status | Tag |
|
||||
|---------|--------|-----|
|
||||
| Notifications System | Done | v4.0.0 |
|
||||
| Notifications Interface | Done | v4.0.0 |
|
||||
### Completed Features ✅
|
||||
|
||||
Features marked as "Done" are available in current releases. The tag column shows the version where a feature was first introduced.
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **Notifications System** | v4.0.0 |
|
||||
| **Notifications Interface** | v4.0.0 |
|
||||
|
||||
---
|
||||
|
||||
## Recent Releases
|
||||
|
||||
:::info Full Release History
|
||||
For a complete list of all releases, changelogs, and download links, visit the [Unraid API GitHub Releases](https://github.com/unraid/api/releases) page.
|
||||
:::
|
||||
|
||||
### Unraid v7.2-beta.1 Highlights
|
||||
|
||||
- 🎉 **API included in Unraid OS** - Native integration
|
||||
- 🔐 **OIDC/SSO Support** - Enterprise authentication
|
||||
- 📦 **Standalone API** - Separated from Connect plugin
|
||||
|
||||
### v4.0.0 Highlights
|
||||
|
||||
- 🛡️ **Permissions System Rewrite** - Enhanced security
|
||||
- 📊 **Comprehensive Monitoring** - Array, Docker, and Share status
|
||||
- 🔔 **Notifications System** - Real-time alerts and notifications
|
||||
- 🛠️ **Developer Environment** - Improved development tools
|
||||
|
||||
## Community Feedback
|
||||
|
||||
:::tip Have a Feature Request?
|
||||
We value community input! Please submit feature requests and feedback through:
|
||||
|
||||
- [Unraid Forums](https://forums.unraid.net)
|
||||
- [GitHub Issues](https://github.com/unraid/api/issues) - API is open source!
|
||||
|
||||
:::
|
||||
|
||||
## Version Support
|
||||
|
||||
| Unraid Version | API Version | Support Status |
|
||||
|----------------|-------------|----------------|
|
||||
| Unraid v7.2-beta.1+ | Latest | ✅ Active |
|
||||
| 7.0 - 7.1.x | v4.x via Plugin | ⚠️ Limited |
|
||||
| 6.12.x | v4.x via Plugin | ⚠️ Limited |
|
||||
| < 6.12 | Not Supported | ❌ EOL |
|
||||
|
||||
:::warning Legacy Support
|
||||
Versions prior to Unraid 7.2 require the API to be installed through the Unraid Connect plugin. Some features may not be available on older versions.
|
||||
:::
|
||||
|
||||
:::tip Pre-release Versions
|
||||
You can always install the Unraid Connect plugin to access pre-release versions of the API and get early access to new features before they're included in Unraid OS releases.
|
||||
:::
|
||||
|
||||
@@ -13,7 +13,9 @@
|
||||
"watch": false,
|
||||
"interpreter": "/usr/local/bin/node",
|
||||
"ignore_watch": ["node_modules", "src", ".env.*", "myservers.cfg"],
|
||||
"log_file": "/var/log/graphql-api.log",
|
||||
"out_file": "/var/log/graphql-api.log",
|
||||
"error_file": "/var/log/graphql-api.log",
|
||||
"merge_logs": true,
|
||||
"kill_timeout": 10000
|
||||
}
|
||||
]
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@unraid/api",
|
||||
"version": "4.11.0",
|
||||
"version": "4.18.2",
|
||||
"main": "src/cli/index.ts",
|
||||
"type": "module",
|
||||
"corepack": {
|
||||
@@ -10,14 +10,14 @@
|
||||
"author": "Lime Technology, Inc. <unraid.net>",
|
||||
"license": "GPL-2.0-or-later",
|
||||
"engines": {
|
||||
"pnpm": "10.13.1"
|
||||
"pnpm": "10.15.0"
|
||||
},
|
||||
"scripts": {
|
||||
"// Development": "",
|
||||
"start": "node dist/main.js",
|
||||
"dev": "vite",
|
||||
"dev": "clear && vite",
|
||||
"dev:debug": "NODE_OPTIONS='--inspect-brk=9229 --enable-source-maps' vite",
|
||||
"command": "pnpm run build && clear && ./dist/cli.js",
|
||||
"command": "COMMAND_TESTER=true pnpm run build > /dev/null 2>&1 && NODE_ENV=development ./dist/cli.js",
|
||||
"command:raw": "./dist/cli.js",
|
||||
"// Build and Deploy": "",
|
||||
"build": "vite build --mode=production",
|
||||
@@ -51,7 +51,7 @@
|
||||
"unraid-api": "dist/cli.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@apollo/client": "3.13.8",
|
||||
"@apollo/client": "3.14.0",
|
||||
"@apollo/server": "4.12.2",
|
||||
"@as-integrations/fastify": "2.1.1",
|
||||
"@fastify/cookie": "11.0.2",
|
||||
@@ -64,13 +64,13 @@
|
||||
"@jsonforms/core": "3.6.0",
|
||||
"@nestjs/apollo": "13.1.0",
|
||||
"@nestjs/cache-manager": "3.0.1",
|
||||
"@nestjs/common": "11.1.5",
|
||||
"@nestjs/common": "11.1.6",
|
||||
"@nestjs/config": "4.0.2",
|
||||
"@nestjs/core": "11.1.5",
|
||||
"@nestjs/core": "11.1.6",
|
||||
"@nestjs/event-emitter": "3.0.1",
|
||||
"@nestjs/graphql": "13.1.0",
|
||||
"@nestjs/passport": "11.0.5",
|
||||
"@nestjs/platform-fastify": "11.1.5",
|
||||
"@nestjs/platform-fastify": "11.1.6",
|
||||
"@nestjs/schedule": "6.0.0",
|
||||
"@nestjs/throttler": "6.4.0",
|
||||
"@reduxjs/toolkit": "2.8.2",
|
||||
@@ -79,9 +79,10 @@
|
||||
"@unraid/libvirt": "2.1.0",
|
||||
"@unraid/shared": "workspace:*",
|
||||
"accesscontrol": "2.2.1",
|
||||
"atomically": "2.0.3",
|
||||
"bycontract": "2.0.11",
|
||||
"bytes": "3.1.2",
|
||||
"cache-manager": "7.0.1",
|
||||
"cache-manager": "7.2.0",
|
||||
"cacheable-lookup": "7.0.0",
|
||||
"camelcase-keys": "9.1.3",
|
||||
"casbin": "5.38.0",
|
||||
@@ -93,16 +94,17 @@
|
||||
"command-exists": "1.2.9",
|
||||
"convert": "5.12.0",
|
||||
"cookie": "1.0.2",
|
||||
"cron": "4.3.2",
|
||||
"cron": "4.3.3",
|
||||
"cross-fetch": "4.1.0",
|
||||
"diff": "8.0.2",
|
||||
"dockerode": "4.0.7",
|
||||
"dotenv": "17.2.1",
|
||||
"escape-html": "1.0.3",
|
||||
"execa": "9.6.0",
|
||||
"exit-hook": "4.0.0",
|
||||
"fastify": "5.4.0",
|
||||
"fastify": "5.5.0",
|
||||
"filenamify": "6.0.0",
|
||||
"fs-extra": "11.3.0",
|
||||
"fs-extra": "11.3.1",
|
||||
"glob": "11.0.3",
|
||||
"global-agent": "3.0.0",
|
||||
"got": "14.4.7",
|
||||
@@ -114,30 +116,31 @@
|
||||
"graphql-ws": "6.0.6",
|
||||
"ini": "5.0.0",
|
||||
"ip": "2.0.1",
|
||||
"jose": "6.0.12",
|
||||
"jose": "6.0.13",
|
||||
"json-bigint-patch": "0.0.8",
|
||||
"lodash-es": "4.17.21",
|
||||
"multi-ini": "2.3.2",
|
||||
"mustache": "4.2.0",
|
||||
"nest-authz": "2.17.0",
|
||||
"nest-commander": "3.18.0",
|
||||
"nest-commander": "3.19.0",
|
||||
"nestjs-pino": "4.4.0",
|
||||
"node-cache": "5.1.2",
|
||||
"node-window-polyfill": "1.0.4",
|
||||
"openid-client": "6.6.4",
|
||||
"p-retry": "6.2.1",
|
||||
"passport-custom": "1.1.1",
|
||||
"passport-http-header-strategy": "1.1.0",
|
||||
"path-type": "6.0.0",
|
||||
"pino": "9.7.0",
|
||||
"pino": "9.9.0",
|
||||
"pino-http": "10.5.0",
|
||||
"pino-pretty": "13.0.0",
|
||||
"pino-pretty": "13.1.1",
|
||||
"pm2": "6.0.8",
|
||||
"reflect-metadata": "^0.1.14",
|
||||
"request": "2.88.2",
|
||||
"rxjs": "7.8.2",
|
||||
"semver": "7.7.2",
|
||||
"strftime": "0.10.3",
|
||||
"systeminformation": "5.27.7",
|
||||
"systeminformation": "5.27.8",
|
||||
"undici": "7.15.0",
|
||||
"uuid": "11.1.0",
|
||||
"ws": "8.18.3",
|
||||
"zen-observable-ts": "1.1.0",
|
||||
@@ -152,7 +155,7 @@
|
||||
}
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "9.32.0",
|
||||
"@eslint/js": "9.34.0",
|
||||
"@graphql-codegen/add": "5.0.3",
|
||||
"@graphql-codegen/cli": "5.0.7",
|
||||
"@graphql-codegen/fragment-matcher": "5.1.0",
|
||||
@@ -162,17 +165,17 @@
|
||||
"@graphql-codegen/typescript-operations": "4.6.1",
|
||||
"@graphql-codegen/typescript-resolvers": "4.5.1",
|
||||
"@graphql-typed-document-node/core": "3.2.0",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.5.1",
|
||||
"@nestjs/testing": "11.1.5",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.6.3",
|
||||
"@nestjs/testing": "11.1.6",
|
||||
"@originjs/vite-plugin-commonjs": "1.0.3",
|
||||
"@rollup/plugin-node-resolve": "16.0.1",
|
||||
"@swc/core": "1.13.2",
|
||||
"@swc/core": "1.13.5",
|
||||
"@types/async-exit-hook": "2.0.2",
|
||||
"@types/bytes": "3.1.5",
|
||||
"@types/cli-table": "0.3.4",
|
||||
"@types/command-exists": "1.2.3",
|
||||
"@types/cors": "2.8.19",
|
||||
"@types/dockerode": "3.3.42",
|
||||
"@types/dockerode": "3.3.43",
|
||||
"@types/graphql-fields": "1.3.9",
|
||||
"@types/graphql-type-uuid": "0.2.6",
|
||||
"@types/ini": "4.1.1",
|
||||
@@ -180,41 +183,37 @@
|
||||
"@types/lodash": "4.17.20",
|
||||
"@types/lodash-es": "4.17.12",
|
||||
"@types/mustache": "4.2.6",
|
||||
"@types/node": "22.16.5",
|
||||
"@types/node": "22.18.0",
|
||||
"@types/pify": "6.1.0",
|
||||
"@types/semver": "7.7.0",
|
||||
"@types/sendmail": "1.4.7",
|
||||
"@types/stoppable": "1.1.3",
|
||||
"@types/strftime": "0.9.8",
|
||||
"@types/supertest": "^6.0.3",
|
||||
"@types/supertest": "6.0.3",
|
||||
"@types/uuid": "10.0.0",
|
||||
"@types/ws": "8.18.1",
|
||||
"@types/wtfnode": "0.7.3",
|
||||
"@types/wtfnode": "0.10.0",
|
||||
"@vitest/coverage-v8": "3.2.4",
|
||||
"@vitest/ui": "3.2.4",
|
||||
"commit-and-tag-version": "9.6.0",
|
||||
"cz-conventional-changelog": "3.3.0",
|
||||
"eslint": "9.32.0",
|
||||
"eslint": "9.34.0",
|
||||
"eslint-plugin-import": "2.32.0",
|
||||
"eslint-plugin-n": "17.21.2",
|
||||
"eslint-plugin-no-relative-import-paths": "1.6.1",
|
||||
"eslint-plugin-prettier": "5.5.3",
|
||||
"graphql-codegen-typescript-validation-schema": "0.17.1",
|
||||
"eslint-plugin-prettier": "5.5.4",
|
||||
"jiti": "2.5.1",
|
||||
"nodemon": "3.1.10",
|
||||
"prettier": "3.6.2",
|
||||
"rollup-plugin-node-externals": "8.0.1",
|
||||
"supertest": "^7.1.4",
|
||||
"tsx": "4.20.3",
|
||||
"rollup-plugin-node-externals": "8.1.0",
|
||||
"supertest": "7.1.4",
|
||||
"tsx": "4.20.5",
|
||||
"type-fest": "4.41.0",
|
||||
"typescript": "5.8.3",
|
||||
"typescript-eslint": "8.38.0",
|
||||
"unplugin-swc": "1.5.5",
|
||||
"vite": "7.0.6",
|
||||
"typescript": "5.9.2",
|
||||
"typescript-eslint": "8.41.0",
|
||||
"unplugin-swc": "1.5.7",
|
||||
"vite": "7.1.3",
|
||||
"vite-plugin-node": "7.0.0",
|
||||
"vite-tsconfig-paths": "5.1.4",
|
||||
"vitest": "3.2.4",
|
||||
"zx": "8.7.1"
|
||||
"zx": "8.8.1"
|
||||
},
|
||||
"overrides": {
|
||||
"eslint": {
|
||||
@@ -229,5 +228,5 @@
|
||||
}
|
||||
},
|
||||
"private": true,
|
||||
"packageManager": "pnpm@10.13.1"
|
||||
"packageManager": "pnpm@10.15.0"
|
||||
}
|
||||
|
||||
@@ -95,6 +95,48 @@ test('Returns both disk and user shares', async () => {
|
||||
"type": "user",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data with periods",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system.with.periods",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system.with.periods",
|
||||
"nameOrig": "system.with.periods",
|
||||
"nfs": {},
|
||||
"size": 0,
|
||||
"smb": {},
|
||||
"splitLevel": "1",
|
||||
"type": "user",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data with 🚀",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system.with.🚀",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system.with.🚀",
|
||||
"nameOrig": "system.with.🚀",
|
||||
"nfs": {},
|
||||
"size": 0,
|
||||
"smb": {},
|
||||
"splitLevel": "1",
|
||||
"type": "user",
|
||||
"used": 33619300,
|
||||
},
|
||||
],
|
||||
}
|
||||
`);
|
||||
@@ -211,6 +253,48 @@ test('Returns shares by type', async () => {
|
||||
"type": "user",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data with periods",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system.with.periods",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system.with.periods",
|
||||
"nameOrig": "system.with.periods",
|
||||
"nfs": {},
|
||||
"size": 0,
|
||||
"smb": {},
|
||||
"splitLevel": "1",
|
||||
"type": "user",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data with 🚀",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system.with.🚀",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system.with.🚀",
|
||||
"nameOrig": "system.with.🚀",
|
||||
"nfs": {},
|
||||
"size": 0,
|
||||
"smb": {},
|
||||
"splitLevel": "1",
|
||||
"type": "user",
|
||||
"used": 33619300,
|
||||
},
|
||||
]
|
||||
`);
|
||||
expect(getShares('disk')).toMatchInlineSnapshot('null');
|
||||
|
||||
@@ -34,6 +34,15 @@ vi.mock('@app/store/index.js', () => ({
|
||||
}),
|
||||
},
|
||||
}));
|
||||
vi.mock('@app/environment.js', () => ({
|
||||
ENVIRONMENT: 'development',
|
||||
environment: {
|
||||
IS_MAIN_PROCESS: true,
|
||||
},
|
||||
}));
|
||||
vi.mock('@app/core/utils/files/file-exists.js', () => ({
|
||||
fileExists: vi.fn().mockResolvedValue(true),
|
||||
}));
|
||||
|
||||
// Mock NestJS Logger to suppress logs during tests
|
||||
vi.mock('@nestjs/common', async (importOriginal) => {
|
||||
@@ -63,13 +72,22 @@ describe('RCloneApiService', () => {
|
||||
const { execa } = await import('execa');
|
||||
const pRetry = await import('p-retry');
|
||||
const { existsSync } = await import('node:fs');
|
||||
const { fileExists } = await import('@app/core/utils/files/file-exists.js');
|
||||
|
||||
mockGot = vi.mocked(got);
|
||||
mockExeca = vi.mocked(execa);
|
||||
mockPRetry = vi.mocked(pRetry.default);
|
||||
mockExistsSync = vi.mocked(existsSync);
|
||||
|
||||
mockGot.post = vi.fn().mockResolvedValue({ body: {} });
|
||||
// Mock successful RClone API response for socket check
|
||||
mockGot.post = vi.fn().mockResolvedValue({ body: { pid: 12345 } });
|
||||
|
||||
// Mock RClone binary exists check
|
||||
vi.mocked(fileExists).mockResolvedValue(true);
|
||||
|
||||
// Mock socket exists
|
||||
mockExistsSync.mockReturnValue(true);
|
||||
|
||||
mockExeca.mockReturnValue({
|
||||
on: vi.fn(),
|
||||
kill: vi.fn(),
|
||||
@@ -77,10 +95,12 @@ describe('RCloneApiService', () => {
|
||||
pid: 12345,
|
||||
} as any);
|
||||
mockPRetry.mockResolvedValue(undefined);
|
||||
mockExistsSync.mockReturnValue(false);
|
||||
|
||||
service = new RCloneApiService();
|
||||
await service.onModuleInit();
|
||||
|
||||
// Reset the mock after initialization to prepare for test-specific responses
|
||||
mockGot.post.mockClear();
|
||||
});
|
||||
|
||||
describe('getProviders', () => {
|
||||
@@ -102,6 +122,9 @@ describe('RCloneApiService', () => {
|
||||
json: {},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -129,6 +152,11 @@ describe('RCloneApiService', () => {
|
||||
'http://unix:/tmp/rclone.sock:/config/listremotes',
|
||||
expect.objectContaining({
|
||||
json: {},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -155,6 +183,11 @@ describe('RCloneApiService', () => {
|
||||
'http://unix:/tmp/rclone.sock:/config/get',
|
||||
expect.objectContaining({
|
||||
json: { name: 'test-remote' },
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -193,6 +226,11 @@ describe('RCloneApiService', () => {
|
||||
type: 's3',
|
||||
parameters: { access_key_id: 'AKIA...', secret_access_key: 'secret' },
|
||||
},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -217,6 +255,11 @@ describe('RCloneApiService', () => {
|
||||
name: 'existing-remote',
|
||||
access_key_id: 'NEW_AKIA...',
|
||||
},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -235,6 +278,11 @@ describe('RCloneApiService', () => {
|
||||
'http://unix:/tmp/rclone.sock:/config/delete',
|
||||
expect.objectContaining({
|
||||
json: { name: 'remote-to-delete' },
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -261,6 +309,11 @@ describe('RCloneApiService', () => {
|
||||
dstFs: 'remote:backup/path',
|
||||
delete_on: 'dst',
|
||||
},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -279,6 +332,11 @@ describe('RCloneApiService', () => {
|
||||
'http://unix:/tmp/rclone.sock:/job/status',
|
||||
expect.objectContaining({
|
||||
json: { jobid: 'job-123' },
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -299,6 +357,11 @@ describe('RCloneApiService', () => {
|
||||
'http://unix:/tmp/rclone.sock:/job/list',
|
||||
expect.objectContaining({
|
||||
json: {},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
@@ -3,6 +3,7 @@ import '@app/__test__/setup/env-setup.js';
|
||||
import '@app/__test__/setup/keyserver-mock.js';
|
||||
import '@app/__test__/setup/config-setup.js';
|
||||
import '@app/__test__/setup/store-reset.js';
|
||||
import '@app/__test__/setup/api-json-backup.js';
|
||||
|
||||
// This file is automatically loaded by Vitest before running tests
|
||||
// It imports all the setup files that need to be run before tests
|
||||
|
||||
36
api/src/__test__/setup/api-json-backup.ts
Normal file
36
api/src/__test__/setup/api-json-backup.ts
Normal file
@@ -0,0 +1,36 @@
|
||||
import { existsSync, readFileSync, writeFileSync } from 'fs';
|
||||
import { join, resolve } from 'path';
|
||||
|
||||
import { afterAll, beforeAll } from 'vitest';
|
||||
|
||||
// Get the project root directory
|
||||
const projectRoot = resolve(process.cwd());
|
||||
const apiJsonPath = join(projectRoot, 'dev/configs/api.json');
|
||||
const apiJsonBackupPath = join(projectRoot, 'dev/configs/api.json.backup');
|
||||
|
||||
let originalContent: string | null = null;
|
||||
|
||||
/**
|
||||
* Backs up api.json before tests run and restores it after tests complete.
|
||||
* This prevents tests from permanently modifying the development configuration.
|
||||
*/
|
||||
export function setupApiJsonBackup() {
|
||||
beforeAll(() => {
|
||||
// Save the original content if the file exists
|
||||
if (existsSync(apiJsonPath)) {
|
||||
originalContent = readFileSync(apiJsonPath, 'utf-8');
|
||||
// Create a backup file as well for safety
|
||||
writeFileSync(apiJsonBackupPath, originalContent, 'utf-8');
|
||||
}
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
// Restore the original content if we saved it
|
||||
if (originalContent !== null) {
|
||||
writeFileSync(apiJsonPath, originalContent, 'utf-8');
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Auto-run for all tests that import this module
|
||||
setupApiJsonBackup();
|
||||
@@ -1,5 +1,6 @@
|
||||
import { expect, test } from 'vitest';
|
||||
import { beforeEach, describe, expect, test, vi } from 'vitest';
|
||||
|
||||
import { parseConfig } from '@app/core/utils/misc/parse-config.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { FileLoadStatus } from '@app/store/types.js';
|
||||
|
||||
@@ -446,6 +447,44 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"splitLevel": "1",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cache": false,
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data with periods",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system.with.periods",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system.with.periods",
|
||||
"nameOrig": "system.with.periods",
|
||||
"size": 0,
|
||||
"splitLevel": "1",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cache": false,
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data with 🚀",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system.with.🚀",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system.with.🚀",
|
||||
"nameOrig": "system.with.🚀",
|
||||
"size": 0,
|
||||
"splitLevel": "1",
|
||||
"used": 33619300,
|
||||
},
|
||||
]
|
||||
`);
|
||||
expect(nfsShares).toMatchInlineSnapshot(`
|
||||
@@ -1110,3 +1149,209 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
describe('Share parsing with periods in names', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
test('parseConfig handles periods in INI section names', () => {
|
||||
const mockIniContent = `
|
||||
["share.with.periods"]
|
||||
name=share.with.periods
|
||||
useCache=yes
|
||||
include=
|
||||
exclude=
|
||||
|
||||
[normal_share]
|
||||
name=normal_share
|
||||
useCache=no
|
||||
include=
|
||||
exclude=
|
||||
`;
|
||||
|
||||
const result = parseConfig<any>({
|
||||
file: mockIniContent,
|
||||
type: 'ini',
|
||||
});
|
||||
|
||||
// The result should now have properly flattened keys
|
||||
|
||||
expect(result).toHaveProperty('shareWithPeriods');
|
||||
expect(result).toHaveProperty('normalShare');
|
||||
expect(result.shareWithPeriods.name).toBe('share.with.periods');
|
||||
expect(result.normalShare.name).toBe('normal_share');
|
||||
});
|
||||
|
||||
test('shares parser handles periods in share names correctly', async () => {
|
||||
const { parse } = await import('@app/store/state-parsers/shares.js');
|
||||
|
||||
// The parser expects an object where values are share configs
|
||||
const mockSharesState = {
|
||||
shareWithPeriods: {
|
||||
name: 'share.with.periods',
|
||||
free: '1000000',
|
||||
used: '500000',
|
||||
size: '1500000',
|
||||
include: '',
|
||||
exclude: '',
|
||||
useCache: 'yes',
|
||||
},
|
||||
normalShare: {
|
||||
name: 'normal_share',
|
||||
free: '2000000',
|
||||
used: '750000',
|
||||
size: '2750000',
|
||||
include: '',
|
||||
exclude: '',
|
||||
useCache: 'no',
|
||||
},
|
||||
} as any;
|
||||
|
||||
const result = parse(mockSharesState);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
const periodShare = result.find((s) => s.name === 'share.with.periods');
|
||||
const normalShare = result.find((s) => s.name === 'normal_share');
|
||||
|
||||
expect(periodShare).toBeDefined();
|
||||
expect(periodShare?.id).toBe('share.with.periods');
|
||||
expect(periodShare?.name).toBe('share.with.periods');
|
||||
expect(periodShare?.cache).toBe(true);
|
||||
|
||||
expect(normalShare).toBeDefined();
|
||||
expect(normalShare?.id).toBe('normal_share');
|
||||
expect(normalShare?.name).toBe('normal_share');
|
||||
expect(normalShare?.cache).toBe(false);
|
||||
});
|
||||
|
||||
test('SMB parser handles periods in share names', async () => {
|
||||
const { parse } = await import('@app/store/state-parsers/smb.js');
|
||||
|
||||
const mockSmbState = {
|
||||
'share.with.periods': {
|
||||
export: 'e',
|
||||
security: 'public',
|
||||
writeList: '',
|
||||
readList: '',
|
||||
volsizelimit: '0',
|
||||
},
|
||||
normal_share: {
|
||||
export: 'e',
|
||||
security: 'private',
|
||||
writeList: 'user1,user2',
|
||||
readList: '',
|
||||
volsizelimit: '1000',
|
||||
},
|
||||
} as any;
|
||||
|
||||
const result = parse(mockSmbState);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
const periodShare = result.find((s) => s.name === 'share.with.periods');
|
||||
const normalShare = result.find((s) => s.name === 'normal_share');
|
||||
|
||||
expect(periodShare).toBeDefined();
|
||||
expect(periodShare?.name).toBe('share.with.periods');
|
||||
expect(periodShare?.enabled).toBe(true);
|
||||
|
||||
expect(normalShare).toBeDefined();
|
||||
expect(normalShare?.name).toBe('normal_share');
|
||||
expect(normalShare?.writeList).toEqual(['user1', 'user2']);
|
||||
});
|
||||
|
||||
test('NFS parser handles periods in share names', async () => {
|
||||
const { parse } = await import('@app/store/state-parsers/nfs.js');
|
||||
|
||||
const mockNfsState = {
|
||||
'share.with.periods': {
|
||||
export: 'e',
|
||||
security: 'public',
|
||||
writeList: '',
|
||||
readList: 'user1',
|
||||
hostList: '',
|
||||
},
|
||||
normal_share: {
|
||||
export: 'd',
|
||||
security: 'private',
|
||||
writeList: 'user2',
|
||||
readList: '',
|
||||
hostList: '192.168.1.0/24',
|
||||
},
|
||||
} as any;
|
||||
|
||||
const result = parse(mockNfsState);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
const periodShare = result.find((s) => s.name === 'share.with.periods');
|
||||
const normalShare = result.find((s) => s.name === 'normal_share');
|
||||
|
||||
expect(periodShare).toBeDefined();
|
||||
expect(periodShare?.name).toBe('share.with.periods');
|
||||
expect(periodShare?.enabled).toBe(true);
|
||||
expect(periodShare?.readList).toEqual(['user1']);
|
||||
|
||||
expect(normalShare).toBeDefined();
|
||||
expect(normalShare?.name).toBe('normal_share');
|
||||
expect(normalShare?.enabled).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Share lookup with periods in names', () => {
|
||||
test('getShares finds user shares with periods in names', async () => {
|
||||
// Mock the store state
|
||||
const mockStore = await import('@app/store/index.js');
|
||||
const mockEmhttpState = {
|
||||
shares: [
|
||||
{
|
||||
id: 'share.with.periods',
|
||||
name: 'share.with.periods',
|
||||
cache: true,
|
||||
free: 1000000,
|
||||
used: 500000,
|
||||
size: 1500000,
|
||||
include: [],
|
||||
exclude: [],
|
||||
},
|
||||
{
|
||||
id: 'normal_share',
|
||||
name: 'normal_share',
|
||||
cache: false,
|
||||
free: 2000000,
|
||||
used: 750000,
|
||||
size: 2750000,
|
||||
include: [],
|
||||
exclude: [],
|
||||
},
|
||||
],
|
||||
smbShares: [
|
||||
{ name: 'share.with.periods', enabled: true, security: 'public' },
|
||||
{ name: 'normal_share', enabled: true, security: 'private' },
|
||||
],
|
||||
nfsShares: [
|
||||
{ name: 'share.with.periods', enabled: false },
|
||||
{ name: 'normal_share', enabled: true },
|
||||
],
|
||||
disks: [],
|
||||
};
|
||||
|
||||
const gettersSpy = vi.spyOn(mockStore, 'getters', 'get').mockReturnValue({
|
||||
emhttp: () => mockEmhttpState,
|
||||
} as any);
|
||||
|
||||
const { getShares } = await import('@app/core/utils/shares/get-shares.js');
|
||||
|
||||
const periodShare = getShares('user', { name: 'share.with.periods' });
|
||||
const normalShare = getShares('user', { name: 'normal_share' });
|
||||
|
||||
expect(periodShare).not.toBeNull();
|
||||
expect(periodShare?.name).toBe('share.with.periods');
|
||||
expect(periodShare?.type).toBe('user');
|
||||
|
||||
expect(normalShare).not.toBeNull();
|
||||
expect(normalShare?.name).toBe('normal_share');
|
||||
expect(normalShare?.type).toBe('user');
|
||||
|
||||
gettersSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -92,6 +92,44 @@ test('Returns parsed state file', async () => {
|
||||
"splitLevel": "1",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cache": false,
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data with periods",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system.with.periods",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system.with.periods",
|
||||
"nameOrig": "system.with.periods",
|
||||
"size": 0,
|
||||
"splitLevel": "1",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cache": false,
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data with 🚀",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system.with.🚀",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system.with.🚀",
|
||||
"nameOrig": "system.with.🚀",
|
||||
"size": 0,
|
||||
"splitLevel": "1",
|
||||
"used": 33619300,
|
||||
},
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
@@ -1,29 +1,37 @@
|
||||
import '@app/dotenv.js';
|
||||
|
||||
import { execa } from 'execa';
|
||||
import { Logger } from '@nestjs/common';
|
||||
|
||||
import { CommandFactory } from 'nest-commander';
|
||||
|
||||
import { internalLogger, logger } from '@app/core/log.js';
|
||||
import { LOG_LEVEL } from '@app/environment.js';
|
||||
import { CliModule } from '@app/unraid-api/cli/cli.module.js';
|
||||
import { LOG_LEVEL, SUPPRESS_LOGS } from '@app/environment.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
|
||||
const getUnraidApiLocation = async () => {
|
||||
const { execa } = await import('execa');
|
||||
try {
|
||||
const shellToUse = await execa('which unraid-api');
|
||||
return shellToUse.stdout.trim();
|
||||
} catch (err) {
|
||||
logger.debug('Could not find unraid-api in PATH, using default location');
|
||||
|
||||
return '/usr/bin/unraid-api';
|
||||
}
|
||||
};
|
||||
|
||||
const getLogger = () => {
|
||||
if (LOG_LEVEL === 'TRACE' && !SUPPRESS_LOGS) {
|
||||
return new LogService();
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
const logger = getLogger();
|
||||
try {
|
||||
await import('json-bigint-patch');
|
||||
const { CliModule } = await import('@app/unraid-api/cli/cli.module.js');
|
||||
|
||||
await CommandFactory.run(CliModule, {
|
||||
cliName: 'unraid-api',
|
||||
logger: LOG_LEVEL === 'TRACE' ? new LogService() : false, // - enable this to see nest initialization issues
|
||||
logger: logger, // - enable this to see nest initialization issues
|
||||
completion: {
|
||||
fig: false,
|
||||
cmd: 'completion-script',
|
||||
@@ -32,10 +40,8 @@ try {
|
||||
});
|
||||
process.exit(0);
|
||||
} catch (error) {
|
||||
logger.error('ERROR:', error);
|
||||
internalLogger.error({
|
||||
message: 'Failed to start unraid-api',
|
||||
error,
|
||||
});
|
||||
if (logger) {
|
||||
logger.error('ERROR:', error);
|
||||
}
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { pino } from 'pino';
|
||||
import pino from 'pino';
|
||||
import pretty from 'pino-pretty';
|
||||
|
||||
import { API_VERSION, LOG_LEVEL, LOG_TYPE } from '@app/environment.js';
|
||||
import { API_VERSION, LOG_LEVEL, LOG_TYPE, PATHS_LOGS_FILE, SUPPRESS_LOGS } from '@app/environment.js';
|
||||
|
||||
export const levels = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'] as const;
|
||||
|
||||
@@ -9,18 +9,46 @@ export type LogLevel = (typeof levels)[number];
|
||||
|
||||
const level = levels[levels.indexOf(LOG_LEVEL.toLowerCase() as LogLevel)] ?? 'info';
|
||||
|
||||
export const logDestination = pino.destination();
|
||||
const nullDestination = pino.destination({
|
||||
write() {
|
||||
// Suppress all logs
|
||||
},
|
||||
});
|
||||
|
||||
const stream =
|
||||
LOG_TYPE === 'pretty'
|
||||
? pretty({
|
||||
singleLine: true,
|
||||
hideObject: false,
|
||||
colorize: true,
|
||||
ignore: 'hostname,pid',
|
||||
destination: logDestination,
|
||||
})
|
||||
: logDestination;
|
||||
export const logDestination =
|
||||
process.env.SUPPRESS_LOGS === 'true' ? nullDestination : pino.destination();
|
||||
const localFileDestination = pino.destination({
|
||||
dest: PATHS_LOGS_FILE,
|
||||
sync: true,
|
||||
});
|
||||
|
||||
const stream = SUPPRESS_LOGS
|
||||
? nullDestination
|
||||
: LOG_TYPE === 'pretty'
|
||||
? pretty({
|
||||
singleLine: true,
|
||||
hideObject: false,
|
||||
colorize: true,
|
||||
colorizeObjects: true,
|
||||
levelFirst: false,
|
||||
ignore: 'hostname,pid',
|
||||
destination: logDestination,
|
||||
translateTime: 'HH:mm:ss',
|
||||
customPrettifiers: {
|
||||
time: (timestamp: string | object) => `[${timestamp}`,
|
||||
level: (logLevel: string | object, key: string, log: any, extras: any) => {
|
||||
// Use labelColorized which preserves the colors
|
||||
const { labelColorized } = extras;
|
||||
const context = log.context || log.logger || 'app';
|
||||
return `${labelColorized} ${context}]`;
|
||||
},
|
||||
},
|
||||
messageFormat: (log: any, messageKey: string) => {
|
||||
const msg = log[messageKey] || log.msg || '';
|
||||
return msg;
|
||||
},
|
||||
})
|
||||
: logDestination;
|
||||
|
||||
export const logger = pino(
|
||||
{
|
||||
@@ -70,6 +98,7 @@ export const keyServerLogger = logger.child({ logger: 'key-server' });
|
||||
export const remoteAccessLogger = logger.child({ logger: 'remote-access' });
|
||||
export const remoteQueryLogger = logger.child({ logger: 'remote-query' });
|
||||
export const apiLogger = logger.child({ logger: 'api' });
|
||||
export const pluginLogger = logger.child({ logger: 'plugin', stream: localFileDestination });
|
||||
|
||||
export const loggers = [
|
||||
internalLogger,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { GraphQLError } from 'graphql';
|
||||
import { sum } from 'lodash-es';
|
||||
|
||||
import { getParityCheckStatus } from '@app/core/modules/array/parity-check-status.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { FileLoadStatus } from '@app/store/types.js';
|
||||
import {
|
||||
@@ -61,5 +62,6 @@ export const getArrayData = (getState = store.getState): UnraidArray => {
|
||||
parities,
|
||||
disks,
|
||||
caches,
|
||||
parityCheckStatus: getParityCheckStatus(emhttp.var),
|
||||
};
|
||||
};
|
||||
|
||||
1080
api/src/core/modules/array/parity-check-status.test.ts
Normal file
1080
api/src/core/modules/array/parity-check-status.test.ts
Normal file
File diff suppressed because it is too large
Load Diff
72
api/src/core/modules/array/parity-check-status.ts
Normal file
72
api/src/core/modules/array/parity-check-status.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
import { toNumberAlways } from '@unraid/shared/util/data.js';
|
||||
|
||||
import type { Var } from '@app/core/types/states/var.js';
|
||||
import type { ParityCheck } from '@app/unraid-api/graph/resolvers/array/parity.model.js';
|
||||
|
||||
export enum ParityCheckStatus {
|
||||
NEVER_RUN = 'never_run',
|
||||
RUNNING = 'running',
|
||||
PAUSED = 'paused',
|
||||
COMPLETED = 'completed',
|
||||
CANCELLED = 'cancelled',
|
||||
FAILED = 'failed',
|
||||
}
|
||||
|
||||
function calculateParitySpeed(deltaTime: number, deltaBlocks: number) {
|
||||
if (deltaTime === 0 || deltaBlocks === 0) return 0;
|
||||
const deltaBytes = deltaBlocks * 1024;
|
||||
const speedMBps = deltaBytes / deltaTime / 1024 / 1024;
|
||||
return Math.round(speedMBps);
|
||||
}
|
||||
|
||||
type RelevantVarData = Pick<
|
||||
Var,
|
||||
| 'mdResyncPos'
|
||||
| 'mdResyncDt'
|
||||
| 'sbSyncExit'
|
||||
| 'sbSynced'
|
||||
| 'sbSynced2'
|
||||
| 'mdResyncDb'
|
||||
| 'mdResyncSize'
|
||||
>;
|
||||
|
||||
function getStatusFromVarData(varData: RelevantVarData): ParityCheckStatus {
|
||||
const { mdResyncPos, mdResyncDt, sbSyncExit, sbSynced, sbSynced2 } = varData;
|
||||
const mdResyncDtNumber = toNumberAlways(mdResyncDt, 0);
|
||||
const sbSyncExitNumber = toNumberAlways(sbSyncExit, 0);
|
||||
|
||||
switch (true) {
|
||||
case mdResyncPos > 0:
|
||||
return mdResyncDtNumber > 0 ? ParityCheckStatus.RUNNING : ParityCheckStatus.PAUSED;
|
||||
case sbSynced === 0:
|
||||
return ParityCheckStatus.NEVER_RUN;
|
||||
case sbSyncExitNumber === -4:
|
||||
return ParityCheckStatus.CANCELLED;
|
||||
case sbSyncExitNumber !== 0:
|
||||
return ParityCheckStatus.FAILED;
|
||||
case sbSynced2 > 0:
|
||||
return ParityCheckStatus.COMPLETED;
|
||||
default:
|
||||
return ParityCheckStatus.NEVER_RUN;
|
||||
}
|
||||
}
|
||||
|
||||
export function getParityCheckStatus(varData: RelevantVarData): ParityCheck {
|
||||
const { sbSynced, sbSynced2, mdResyncDt, mdResyncDb, mdResyncPos, mdResyncSize } = varData;
|
||||
const deltaTime = toNumberAlways(mdResyncDt, 0);
|
||||
const deltaBlocks = toNumberAlways(mdResyncDb, 0);
|
||||
|
||||
// seconds since epoch (unix timestamp)
|
||||
const now = sbSynced2 > 0 ? sbSynced2 : Date.now() / 1000;
|
||||
return {
|
||||
status: getStatusFromVarData(varData),
|
||||
speed: String(calculateParitySpeed(deltaTime, deltaBlocks)),
|
||||
date: sbSynced > 0 ? new Date(sbSynced * 1000) : undefined,
|
||||
duration: sbSynced > 0 ? Math.round(now - sbSynced) : undefined,
|
||||
// percentage as integer, clamped to [0, 100]
|
||||
progress:
|
||||
mdResyncSize <= 0
|
||||
? 0
|
||||
: Math.round(Math.min(100, Math.max(0, (mdResyncPos / mdResyncSize) * 100))),
|
||||
};
|
||||
}
|
||||
@@ -8,7 +8,7 @@ export class NginxManager {
|
||||
await execa('/etc/rc.d/rc.nginx', ['reload']);
|
||||
return true;
|
||||
} catch (err: unknown) {
|
||||
logger.warn('Failed to restart Nginx with error: ', err);
|
||||
logger.warn('Failed to restart Nginx with error: %o', err as object);
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -8,7 +8,7 @@ export class UpdateDNSManager {
|
||||
await execa('/usr/bin/php', ['/usr/local/emhttp/plugins/dynamix/include/UpdateDNS.php']);
|
||||
return true;
|
||||
} catch (err: unknown) {
|
||||
logger.warn('Failed to call Update DNS with error: ', err);
|
||||
logger.warn('Failed to call Update DNS with error: %o', err as object);
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -13,8 +13,11 @@ export const pubsub = new PubSub({ eventEmitter });
|
||||
|
||||
/**
|
||||
* Create a pubsub subscription.
|
||||
* @param channel The pubsub channel to subscribe to.
|
||||
* @param channel The pubsub channel to subscribe to. Can be either a predefined GRAPHQL_PUBSUB_CHANNEL
|
||||
* or a dynamic string for runtime-generated topics (e.g., log file paths like "LOG_FILE:/var/log/test.log")
|
||||
*/
|
||||
export const createSubscription = (channel: GRAPHQL_PUBSUB_CHANNEL) => {
|
||||
return pubsub.asyncIterableIterator(channel);
|
||||
export const createSubscription = <T = any>(
|
||||
channel: GRAPHQL_PUBSUB_CHANNEL | string
|
||||
): AsyncIterableIterator<T> => {
|
||||
return pubsub.asyncIterableIterator<T>(channel);
|
||||
};
|
||||
|
||||
@@ -68,11 +68,24 @@ export type Var = {
|
||||
mdNumStripes: number;
|
||||
mdNumStripesDefault: number;
|
||||
mdNumStripesStatus: string;
|
||||
/**
|
||||
* Serves a dual purpose depending on context:
|
||||
* - Total size of the operation (in sectors/blocks)
|
||||
* - Running state indicator (0 = paused, >0 = running)
|
||||
*/
|
||||
mdResync: number;
|
||||
mdResyncAction: string;
|
||||
mdResyncCorr: string;
|
||||
mdResyncDb: string;
|
||||
/** Average time interval (delta time) in seconds of current parity operations */
|
||||
mdResyncDt: string;
|
||||
/**
|
||||
* Current position in the parity operation (in sectors/blocks).
|
||||
* When mdResyncPos > 0, a parity operation is active.
|
||||
* When mdResyncPos = 0, no parity operation is running.
|
||||
*
|
||||
* Used to calculate progress percentage.
|
||||
*/
|
||||
mdResyncPos: number;
|
||||
mdResyncSize: number;
|
||||
mdState: ArrayState;
|
||||
@@ -136,9 +149,36 @@ export type Var = {
|
||||
sbName: string;
|
||||
sbNumDisks: number;
|
||||
sbState: string;
|
||||
/**
|
||||
* Unix timestamp when parity operation started.
|
||||
* When sbSynced = 0, indicates no parity check has ever been run.
|
||||
*
|
||||
* Used to calculate elapsed time during active operations.
|
||||
*/
|
||||
sbSynced: number;
|
||||
sbSynced2: number;
|
||||
/**
|
||||
* Unix timestamp when parity operation completed (successfully or with errors).
|
||||
* Used to display completion time in status messages.
|
||||
*
|
||||
* When sbSynced2 = 0, indicates operation started but not yet finished
|
||||
*/
|
||||
sbSyncErrs: number;
|
||||
/**
|
||||
* Exit status code that indicates how the last parity operation completed, following standard Unix conventions.
|
||||
*
|
||||
* sbSyncExit = 0 - Successful Completion
|
||||
* - Parity operation completed normally without errors
|
||||
* - Used to calculate speed and display success message
|
||||
*
|
||||
* sbSyncExit = -4 - Aborted/Cancelled
|
||||
* - Operation was manually cancelled by user
|
||||
* - Displays as "aborted" in the UI
|
||||
*
|
||||
* sbSyncExit ≠ 0 (other values) - Failed/Incomplete
|
||||
* - Operation failed due to errors or other issues
|
||||
* - Displays the numeric error code
|
||||
*/
|
||||
sbSyncExit: string;
|
||||
sbUpdated: string;
|
||||
sbVersion: string;
|
||||
|
||||
@@ -26,7 +26,7 @@ export const loadState = <T extends Record<string, unknown>>(filePath: string):
|
||||
logger.trace(
|
||||
'Failed loading state file "%s" with "%s"',
|
||||
filePath,
|
||||
error instanceof Error ? error.message : error
|
||||
error instanceof Error ? error.message : String(error)
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -23,6 +23,54 @@ type OptionsWithLoadedFile = {
|
||||
type: ConfigType;
|
||||
};
|
||||
|
||||
/**
|
||||
* Flattens nested objects that were incorrectly created by periods in INI section names.
|
||||
* For example: { system: { with: { periods: {...} } } } -> { "system.with.periods": {...} }
|
||||
*/
|
||||
const flattenPeriodSections = (obj: Record<string, any>, prefix = ''): Record<string, any> => {
|
||||
const result: Record<string, any> = {};
|
||||
const isNestedObject = (value: unknown) =>
|
||||
Boolean(value && typeof value === 'object' && !Array.isArray(value));
|
||||
// prevent prototype pollution/injection
|
||||
const isUnsafeKey = (k: string) => k === '__proto__' || k === 'prototype' || k === 'constructor';
|
||||
|
||||
for (const [key, value] of Object.entries(obj)) {
|
||||
if (isUnsafeKey(key)) continue;
|
||||
const fullKey = prefix ? `${prefix}.${key}` : key;
|
||||
|
||||
if (!isNestedObject(value)) {
|
||||
result[fullKey] = value;
|
||||
continue;
|
||||
}
|
||||
|
||||
const section = {};
|
||||
const nestedObjs = {};
|
||||
let hasSectionProps = false;
|
||||
|
||||
for (const [propKey, propValue] of Object.entries(value)) {
|
||||
if (isUnsafeKey(propKey)) continue;
|
||||
if (isNestedObject(propValue)) {
|
||||
nestedObjs[propKey] = propValue;
|
||||
} else {
|
||||
section[propKey] = propValue;
|
||||
hasSectionProps = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Process direct properties first to maintain order
|
||||
if (hasSectionProps) {
|
||||
result[fullKey] = section;
|
||||
}
|
||||
|
||||
// Then process nested objects
|
||||
if (Object.keys(nestedObjs).length > 0) {
|
||||
Object.assign(result, flattenPeriodSections(nestedObjs, fullKey));
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts the following
|
||||
* ```
|
||||
@@ -127,6 +175,8 @@ export const parseConfig = <T extends Record<string, any>>(
|
||||
let data: Record<string, any>;
|
||||
try {
|
||||
data = parseIni(fileContents);
|
||||
// Fix nested objects created by periods in section names
|
||||
data = flattenPeriodSections(data);
|
||||
} catch (error) {
|
||||
throw new AppError(
|
||||
`Failed to parse config file: ${error instanceof Error ? error.message : String(error)}`
|
||||
|
||||
17
api/src/core/utils/validation/enum-validator.ts
Normal file
17
api/src/core/utils/validation/enum-validator.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
export function isValidEnumValue<T extends Record<string, string | number>>(
|
||||
value: unknown,
|
||||
enumObject: T
|
||||
): value is T[keyof T] {
|
||||
if (value == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return Object.values(enumObject).includes(value as T[keyof T]);
|
||||
}
|
||||
|
||||
export function validateEnumValue<T extends Record<string, string | number>>(
|
||||
value: unknown,
|
||||
enumObject: T
|
||||
): T[keyof T] | undefined {
|
||||
return isValidEnumValue(value, enumObject) ? (value as T[keyof T]) : undefined;
|
||||
}
|
||||
@@ -13,7 +13,7 @@ const isGuiMode = async (): Promise<boolean> => {
|
||||
// exitCode 0 means process was found, 1 means not found
|
||||
return exitCode === 0;
|
||||
} catch (error) {
|
||||
internalLogger.error('Error checking GUI mode: %s', error);
|
||||
internalLogger.error('Error checking GUI mode: %o', error as object);
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
437
api/src/core/utils/validation/validation-processor.test.ts
Normal file
437
api/src/core/utils/validation/validation-processor.test.ts
Normal file
@@ -0,0 +1,437 @@
|
||||
import { describe, expect, it } from 'vitest';
|
||||
|
||||
import type { ValidationResult } from '@app/core/utils/validation/validation-processor.js';
|
||||
import {
|
||||
createValidationProcessor,
|
||||
ResultInterpreters,
|
||||
} from '@app/core/utils/validation/validation-processor.js';
|
||||
|
||||
describe('ValidationProcessor', () => {
|
||||
type TestInput = { value: number; text: string };
|
||||
|
||||
it('should process all validation steps when no errors occur', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'positiveValue',
|
||||
validator: (input: TestInput) => input.value > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'nonEmptyText',
|
||||
validator: (input: TestInput) => input.text.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: 5, text: 'hello' }, { failFast: false });
|
||||
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.errors).toEqual({});
|
||||
});
|
||||
|
||||
it('should collect all errors when failFast is disabled', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'positiveValue',
|
||||
validator: (input: TestInput) => input.value > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'nonEmptyText',
|
||||
validator: (input: TestInput) => input.text.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: -1, text: '' }, { failFast: false });
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.positiveValue).toBe(false);
|
||||
expect(result.errors.nonEmptyText).toBe(false);
|
||||
});
|
||||
|
||||
it('should stop at first error when failFast is enabled', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'positiveValue',
|
||||
validator: (input: TestInput) => input.value > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'nonEmptyText',
|
||||
validator: (input: TestInput) => input.text.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: -1, text: '' }, { failFast: true });
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.positiveValue).toBe(false);
|
||||
expect(result.errors.nonEmptyText).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should always fail fast on steps marked with alwaysFailFast', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'criticalCheck',
|
||||
validator: (input: TestInput) => input.value !== 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
alwaysFailFast: true,
|
||||
},
|
||||
{
|
||||
name: 'nonEmptyText',
|
||||
validator: (input: TestInput) => input.text.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: 0, text: '' }, { failFast: false });
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.criticalCheck).toBe(false);
|
||||
expect(result.errors.nonEmptyText).toBeUndefined(); // Should not be executed
|
||||
});
|
||||
|
||||
it('should work with different result interpreters', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'arrayResult',
|
||||
validator: (input: TestInput) => [1, 2, 3],
|
||||
isError: ResultInterpreters.errorList,
|
||||
},
|
||||
{
|
||||
name: 'nullableResult',
|
||||
validator: (input: TestInput) => (input.value > 0 ? null : 'error'),
|
||||
isError: ResultInterpreters.nullableIsSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: -1, text: 'test' }, { failFast: false });
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.arrayResult).toEqual([1, 2, 3]);
|
||||
expect(result.errors.nullableResult).toBe('error');
|
||||
});
|
||||
|
||||
it('should handle 0-arity validators', () => {
|
||||
const processor = createValidationProcessor({
|
||||
steps: [
|
||||
{
|
||||
name: 'zeroArityValidator',
|
||||
validator: () => true,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'zeroArityValidator2',
|
||||
validator: () => false,
|
||||
isError: ResultInterpreters.booleanMeansFailure,
|
||||
},
|
||||
] as const,
|
||||
});
|
||||
|
||||
const result = processor(null);
|
||||
expect(result.isValid).toBe(true);
|
||||
});
|
||||
|
||||
it('should work with custom result interpreter', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'customCheck',
|
||||
validator: (input: TestInput) => ({ isOk: input.value > 0, code: 'VALUE_CHECK' }),
|
||||
isError: ResultInterpreters.custom((result: { isOk: boolean }) => !result.isOk),
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({ steps });
|
||||
|
||||
const validResult = processor({ value: 5, text: 'test' });
|
||||
expect(validResult.isValid).toBe(true);
|
||||
expect(validResult.errors).toEqual({});
|
||||
|
||||
const invalidResult = processor({ value: -1, text: 'test' });
|
||||
expect(invalidResult.isValid).toBe(false);
|
||||
expect(invalidResult.errors.customCheck).toEqual({ isOk: false, code: 'VALUE_CHECK' });
|
||||
});
|
||||
|
||||
it('should work with validationProcessor result interpreter', () => {
|
||||
const innerProcessor = createValidationProcessor({
|
||||
steps: [
|
||||
{
|
||||
name: 'innerCheck',
|
||||
validator: (val: number) => val > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const,
|
||||
});
|
||||
|
||||
const outerProcessor = createValidationProcessor({
|
||||
steps: [
|
||||
{
|
||||
name: 'nestedValidation',
|
||||
validator: (input: TestInput) => innerProcessor(input.value),
|
||||
isError: ResultInterpreters.validationProcessor,
|
||||
},
|
||||
] as const,
|
||||
});
|
||||
|
||||
const validResult = outerProcessor({ value: 5, text: 'test' });
|
||||
expect(validResult.isValid).toBe(true);
|
||||
|
||||
const invalidResult = outerProcessor({ value: -1, text: 'test' });
|
||||
expect(invalidResult.isValid).toBe(false);
|
||||
expect(invalidResult.errors.nestedValidation).toMatchObject({ isValid: false });
|
||||
});
|
||||
|
||||
it('should handle empty steps array', () => {
|
||||
const processor = createValidationProcessor<readonly []>({
|
||||
steps: [],
|
||||
});
|
||||
|
||||
const result = processor('any input' as never);
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.errors).toEqual({});
|
||||
});
|
||||
|
||||
it('should throw when validators throw errors', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'throwingValidator',
|
||||
validator: (input: TestInput) => {
|
||||
if (input.value === 0) {
|
||||
throw new Error('Division by zero');
|
||||
}
|
||||
return true;
|
||||
},
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({ steps });
|
||||
|
||||
expect(() => processor({ value: 0, text: 'test' })).toThrow('Division by zero');
|
||||
});
|
||||
|
||||
describe('complex validation scenarios', () => {
|
||||
it('should handle multi-type validation results', () => {
|
||||
type ComplexInput = {
|
||||
email: string;
|
||||
age: number;
|
||||
tags: string[];
|
||||
};
|
||||
|
||||
const steps = [
|
||||
{
|
||||
name: 'emailFormat',
|
||||
validator: (input: ComplexInput) =>
|
||||
/\S+@\S+\.\S+/.test(input.email) ? null : 'Invalid email format',
|
||||
isError: ResultInterpreters.nullableIsSuccess,
|
||||
},
|
||||
{
|
||||
name: 'ageRange',
|
||||
validator: (input: ComplexInput) => input.age >= 18 && input.age <= 120,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'tagValidation',
|
||||
validator: (input: ComplexInput) => {
|
||||
const invalidTags = input.tags.filter((tag) => tag.length < 2);
|
||||
return invalidTags;
|
||||
},
|
||||
isError: ResultInterpreters.errorList,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({ steps });
|
||||
|
||||
const validInput: ComplexInput = {
|
||||
email: 'user@example.com',
|
||||
age: 25,
|
||||
tags: ['valid', 'tags', 'here'],
|
||||
};
|
||||
const validResult = processor(validInput);
|
||||
expect(validResult.isValid).toBe(true);
|
||||
|
||||
const invalidInput: ComplexInput = {
|
||||
email: 'invalid-email',
|
||||
age: 150,
|
||||
tags: ['ok', 'a', 'b', 'valid'],
|
||||
};
|
||||
const invalidResult = processor(invalidInput, { failFast: false });
|
||||
expect(invalidResult.isValid).toBe(false);
|
||||
expect(invalidResult.errors.emailFormat).toBe('Invalid email format');
|
||||
expect(invalidResult.errors.ageRange).toBe(false);
|
||||
expect(invalidResult.errors.tagValidation).toEqual(['a', 'b']);
|
||||
});
|
||||
|
||||
it('should preserve type safety with heterogeneous result types', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'stringResult',
|
||||
validator: () => 'error message',
|
||||
isError: (result: string) => result.length > 0,
|
||||
},
|
||||
{
|
||||
name: 'numberResult',
|
||||
validator: () => 42,
|
||||
isError: (result: number) => result !== 0,
|
||||
},
|
||||
{
|
||||
name: 'objectResult',
|
||||
validator: () => ({ code: 'ERR_001', severity: 'high' }),
|
||||
isError: (result: { code: string; severity: string }) => true,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({ steps });
|
||||
const result = processor(null, { failFast: false });
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.stringResult).toBe('error message');
|
||||
expect(result.errors.numberResult).toBe(42);
|
||||
expect(result.errors.objectResult).toEqual({ code: 'ERR_001', severity: 'high' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle undefined vs null in nullable interpreter', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'nullCheck',
|
||||
validator: () => null,
|
||||
isError: ResultInterpreters.nullableIsSuccess,
|
||||
},
|
||||
{
|
||||
name: 'undefinedCheck',
|
||||
validator: () => undefined,
|
||||
isError: ResultInterpreters.nullableIsSuccess,
|
||||
},
|
||||
{
|
||||
name: 'zeroCheck',
|
||||
validator: () => 0,
|
||||
isError: ResultInterpreters.nullableIsSuccess,
|
||||
},
|
||||
{
|
||||
name: 'falseCheck',
|
||||
validator: () => false,
|
||||
isError: ResultInterpreters.nullableIsSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({ steps });
|
||||
const result = processor(null, { failFast: false });
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.nullCheck).toBeUndefined();
|
||||
expect(result.errors.undefinedCheck).toBeUndefined();
|
||||
expect(result.errors.zeroCheck).toBe(0);
|
||||
expect(result.errors.falseCheck).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle very long validation chains', () => {
|
||||
// Test the real-world scenario of dynamically generated validation steps
|
||||
// Note: This demonstrates a limitation of the current type system -
|
||||
// dynamic step generation loses strict typing but still works at runtime
|
||||
type StepInput = { value: number };
|
||||
|
||||
const steps = Array.from({ length: 50 }, (_, i) => ({
|
||||
name: `step${i}`,
|
||||
validator: (input: StepInput) => input.value > i,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
}));
|
||||
|
||||
// For dynamic steps, we need to use a type assertion since TypeScript
|
||||
// can't infer the literal string union from Array.from()
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: 25 }, { failFast: false });
|
||||
expect(result.isValid).toBe(false);
|
||||
|
||||
const errorCount = Object.keys(result.errors).length;
|
||||
expect(errorCount).toBe(25);
|
||||
});
|
||||
|
||||
it('should handle validation by sum typing their inputs', () => {
|
||||
const processor = createValidationProcessor({
|
||||
steps: [
|
||||
{
|
||||
name: 'step1',
|
||||
validator: ({ age }: { age: number }) => age > 18,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'step2',
|
||||
validator: ({ name }: { name: string }) => name.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const result = processor({ age: 25, name: 'John' });
|
||||
expect(result.isValid).toBe(true);
|
||||
|
||||
const result2 = processor({ age: 15, name: '' });
|
||||
expect(result2.isValid).toBe(false);
|
||||
});
|
||||
|
||||
it('should allow wider types as processor inputs', () => {
|
||||
const sumProcessor = createValidationProcessor({
|
||||
steps: [
|
||||
{
|
||||
name: 'step1',
|
||||
validator: ({ age }: { age: number }) => age > 18,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'step2',
|
||||
validator: ({ name }: { name: string }) => name.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
],
|
||||
});
|
||||
type Person = { age: number; name: string };
|
||||
const groupProcessor = createValidationProcessor({
|
||||
steps: [
|
||||
{
|
||||
name: 'step1',
|
||||
validator: ({ age }: Person) => age > 18,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'step2',
|
||||
validator: ({ name }: Person) => name.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const result = sumProcessor({ age: 25, name: 'John', favoriteColor: 'red' });
|
||||
expect(result.isValid).toBe(true);
|
||||
|
||||
const result2 = groupProcessor({ name: '', favoriteColor: 'red', age: 15 });
|
||||
expect(result2.isValid).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
230
api/src/core/utils/validation/validation-processor.ts
Normal file
230
api/src/core/utils/validation/validation-processor.ts
Normal file
@@ -0,0 +1,230 @@
|
||||
/**
|
||||
* @fileoverview Type-safe sequential validation processor
|
||||
*
|
||||
* This module provides a flexible validation system that allows you to chain multiple
|
||||
* validation steps together in a type-safe manner. It supports both fail-fast and
|
||||
* continue-on-error modes, with comprehensive error collection and reporting.
|
||||
*
|
||||
* Key features:
|
||||
* - Type-safe validation pipeline creation
|
||||
* - Sequential validation step execution
|
||||
* - Configurable fail-fast behavior (global or per-step)
|
||||
* - Comprehensive error collection with typed results
|
||||
* - Helper functions for common validation result interpretations
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const validator = createValidationProcessor({
|
||||
* steps: [
|
||||
* {
|
||||
* name: 'required',
|
||||
* validator: (input: string) => input.length > 0,
|
||||
* isError: ResultInterpreters.booleanMeansSuccess
|
||||
* },
|
||||
* {
|
||||
* name: 'email',
|
||||
* validator: (input: string) => /\S+@\S+\.\S+/.test(input),
|
||||
* isError: ResultInterpreters.booleanMeansSuccess
|
||||
* }
|
||||
* ]
|
||||
* });
|
||||
*
|
||||
* const result = validator('user@example.com');
|
||||
* if (!result.isValid) {
|
||||
* console.log('Validation errors:', result.errors);
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
|
||||
export type ValidationStepConfig<TInput, TResult, TName extends string = string> = {
|
||||
name: TName;
|
||||
validator: (input: TInput) => TResult;
|
||||
isError: (result: TResult) => boolean;
|
||||
alwaysFailFast?: boolean;
|
||||
};
|
||||
|
||||
export interface ValidationPipelineConfig {
|
||||
failFast?: boolean;
|
||||
}
|
||||
|
||||
export type ValidationPipelineDefinition<
|
||||
TInput,
|
||||
TSteps extends readonly ValidationStepConfig<TInput, any, string>[],
|
||||
> = {
|
||||
steps: TSteps;
|
||||
};
|
||||
|
||||
/**
 * Maps each step name in TSteps to the result type produced by that step's
 * validator. For a given key K, the matching step config is located via
 * Extract on the `name` discriminant, and its result type R is inferred.
 * This is what gives the errors object precise per-step typing.
 */
export type ExtractStepResults<TSteps extends readonly ValidationStepConfig<any, any, string>[]> = {
    [K in TSteps[number]['name']]: Extract<TSteps[number], { name: K }> extends ValidationStepConfig<
        any,
        infer R,
        K
    >
        ? R
        : never;
};

/**
 * Outcome of running a validation pipeline: an overall validity flag plus a
 * partial map of step name -> raw validator result for each step that failed.
 * `errors` is Partial because only failing steps contribute entries.
 */
export type ValidationResult<TSteps extends readonly ValidationStepConfig<any, any, string>[]> = {
    isValid: boolean;
    errors: Partial<ExtractStepResults<TSteps>>;
};
|
||||
|
||||
// Util: convert a union to an intersection. Works by distributing the union into a
// contravariant (function-parameter) position; inferring the parameter back out then
// yields the intersection of all union members.
type UnionToIntersection<U> = (U extends any ? (arg: U) => void : never) extends (arg: infer I) => void
    ? I
    : never;

// Extract the *intersection* of all input types required by the steps. This guarantees that
// the resulting processor knows about every property that any individual step relies on.
// We purposely compute an intersection (not a union) so that all required fields are present.
type ExtractInputType<TSteps extends readonly ValidationStepConfig<any, any, string>[]> =
    UnionToIntersection<
        TSteps[number] extends ValidationStepConfig<infer TInput, any, string> ? TInput : never
    >;
|
||||
|
||||
/**
|
||||
* Creates a type-safe validation processor that executes a series of validation steps
|
||||
* sequentially and collects errors from failed validations.
|
||||
*
|
||||
* This function returns a validation processor that can be called with input data
|
||||
* and an optional configuration object. The processor will run each validation step
|
||||
* in order, collecting any errors that occur.
|
||||
*
|
||||
* @template TSteps - A readonly array of validation step configurations that defines
|
||||
* the validation pipeline. The type is constrained to ensure type safety
|
||||
* across all steps and their results.
|
||||
*
|
||||
* @param definition - The validation pipeline definition
|
||||
* @param definition.steps - An array of validation step configurations. Each step must have:
|
||||
* - `name`: A unique string identifier for the step
|
||||
* - `validator`: A function that takes input and returns a validation result
|
||||
* - `isError`: A function that determines if the validation result represents an error
|
||||
* - `alwaysFailFast`: Optional flag to always stop execution on this step's failure
|
||||
*
|
||||
* @returns A validation processor function that accepts:
|
||||
* - `input`: The data to validate (type inferred from the first validation step)
|
||||
* - `config`: Optional configuration object with:
|
||||
* - `failFast`: If true, stops execution on first error (unless overridden by step config)
|
||||
*
|
||||
* @example Basic usage with string validation
|
||||
* ```typescript
|
||||
* const nameValidator = createValidationProcessor({
|
||||
* steps: [
|
||||
* {
|
||||
* name: 'required',
|
||||
* validator: (input: string) => input.trim().length > 0,
|
||||
* isError: ResultInterpreters.booleanMeansSuccess
|
||||
* },
|
||||
* {
|
||||
* name: 'minLength',
|
||||
* validator: (input: string) => input.length >= 2,
|
||||
* isError: ResultInterpreters.booleanMeansSuccess
|
||||
* },
|
||||
* {
|
||||
* name: 'maxLength',
|
||||
* validator: (input: string) => input.length <= 50,
|
||||
* isError: ResultInterpreters.booleanMeansSuccess
|
||||
* }
|
||||
* ]
|
||||
* });
|
||||
*
|
||||
* const result = nameValidator('John');
|
||||
* // result.isValid: boolean
|
||||
* // result.errors: { required?: boolean, minLength?: boolean, maxLength?: boolean }
|
||||
* ```
|
||||
*
|
||||
* @example Complex validation with custom error types
|
||||
* ```typescript
|
||||
* type ValidationError = { message: string; code: string };
|
||||
*
|
||||
* const userValidator = createValidationProcessor({
|
||||
* steps: [
|
||||
* {
|
||||
* name: 'email',
|
||||
* validator: (user: { email: string }) =>
|
||||
* /\S+@\S+\.\S+/.test(user.email)
|
||||
* ? null
|
||||
* : { message: 'Invalid email format', code: 'INVALID_EMAIL' },
|
||||
* isError: (result): result is ValidationError => result !== null
|
||||
* },
|
||||
* {
|
||||
* name: 'age',
|
||||
* validator: (user: { age: number }) =>
|
||||
* user.age >= 18
|
||||
* ? null
|
||||
* : { message: 'Must be 18 or older', code: 'UNDERAGE' },
|
||||
* isError: (result): result is ValidationError => result !== null,
|
||||
* alwaysFailFast: true // Stop immediately if age validation fails
|
||||
* }
|
||||
* ]
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* @example Using fail-fast mode
|
||||
* ```typescript
|
||||
* const result = validator(input, { failFast: true });
|
||||
* // Stops on first error, even if subsequent steps would also fail
|
||||
* ```
|
||||
*
|
||||
* @since 1.0.0
|
||||
*/
|
||||
export function createValidationProcessor<
|
||||
const TSteps extends readonly ValidationStepConfig<any, any, string>[],
|
||||
>(definition: { steps: TSteps }) {
|
||||
// Determine the base input type required by all steps (intersection).
|
||||
type BaseInput = ExtractInputType<TSteps>;
|
||||
|
||||
// Helper: widen input type for object literals while keeping regular objects assignable.
|
||||
type InputWithExtras = BaseInput extends object
|
||||
? BaseInput | (BaseInput & Record<string, unknown>)
|
||||
: BaseInput;
|
||||
|
||||
return function processValidation(
|
||||
input: InputWithExtras,
|
||||
config: ValidationPipelineConfig = {}
|
||||
): ValidationResult<TSteps> {
|
||||
const errors: Partial<ExtractStepResults<TSteps>> = {};
|
||||
let hasErrors = false;
|
||||
|
||||
for (const step of definition.steps) {
|
||||
const result = step.validator(input as BaseInput);
|
||||
const isError = step.isError(result);
|
||||
|
||||
if (isError) {
|
||||
hasErrors = true;
|
||||
(errors as any)[step.name] = result;
|
||||
|
||||
// Always fail fast for steps marked as such, or when global failFast is enabled
|
||||
if (step.alwaysFailFast || config.failFast) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
isValid: !hasErrors,
|
||||
errors,
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
/** Helper functions for common result interpretations */
|
||||
export const ResultInterpreters = {
|
||||
/** For boolean results: true = success, false = error */
|
||||
booleanMeansSuccess: (result: boolean): boolean => !result,
|
||||
|
||||
/** For boolean results: false = success, true = error */
|
||||
booleanMeansFailure: (result: boolean): boolean => result,
|
||||
|
||||
/** For nullable results: null/undefined = success, anything else = error */
|
||||
nullableIsSuccess: <T>(result: T | null | undefined): boolean => result != null,
|
||||
|
||||
/** For array results: empty = success, non-empty = error */
|
||||
errorList: <T>(result: T[]): boolean => result.length > 0,
|
||||
|
||||
/** For custom predicate */
|
||||
custom: <T>(predicate: (result: T) => boolean) => predicate,
|
||||
|
||||
/** Interpreting the result of a validation processor */
|
||||
validationProcessor: (result: { isValid: boolean }) => !result.isValid,
|
||||
} as const;
|
||||
@@ -92,6 +92,7 @@ export const LOG_LEVEL = process.env.LOG_LEVEL
|
||||
: process.env.ENVIRONMENT === 'production'
|
||||
? 'INFO'
|
||||
: 'DEBUG';
|
||||
export const SUPPRESS_LOGS = process.env.SUPPRESS_LOGS === 'true';
|
||||
export const MOTHERSHIP_GRAPHQL_LINK = process.env.MOTHERSHIP_GRAPHQL_LINK
|
||||
? process.env.MOTHERSHIP_GRAPHQL_LINK
|
||||
: ENVIRONMENT === 'staging'
|
||||
@@ -101,7 +102,12 @@ export const MOTHERSHIP_GRAPHQL_LINK = process.env.MOTHERSHIP_GRAPHQL_LINK
|
||||
export const PM2_HOME = process.env.PM2_HOME ?? join(homedir(), '.pm2');
|
||||
export const PM2_PATH = join(import.meta.dirname, '../../', 'node_modules', 'pm2', 'bin', 'pm2');
|
||||
export const ECOSYSTEM_PATH = join(import.meta.dirname, '../../', 'ecosystem.config.json');
|
||||
export const LOGS_DIR = process.env.LOGS_DIR ?? '/var/log/unraid-api';
|
||||
export const PATHS_LOGS_DIR =
|
||||
process.env.PATHS_LOGS_DIR ?? process.env.LOGS_DIR ?? '/var/log/unraid-api';
|
||||
export const PATHS_LOGS_FILE = process.env.PATHS_LOGS_FILE ?? '/var/log/graphql-api.log';
|
||||
|
||||
export const PATHS_CONFIG_MODULES =
|
||||
process.env.PATHS_CONFIG_MODULES ?? '/boot/config/plugins/dynamix.my.servers/configs';
|
||||
|
||||
export const PATHS_LOCAL_SESSION_FILE =
|
||||
process.env.PATHS_LOCAL_SESSION_FILE ?? '/var/run/unraid-api/local-session';
|
||||
|
||||
@@ -99,7 +99,7 @@ export const viteNodeApp = async () => {
|
||||
|
||||
asyncExitHook(
|
||||
async (signal) => {
|
||||
logger.info('Exiting with signal %s', signal);
|
||||
logger.info('Exiting with signal %d', signal);
|
||||
await server?.close?.();
|
||||
// If port is unix socket, delete socket before exiting
|
||||
unlinkUnixPort();
|
||||
|
||||
@@ -94,7 +94,7 @@ export const run = async (channel: string, mutation: string, options: RunOptions
|
||||
);
|
||||
}
|
||||
} else {
|
||||
logger.debug('Error: %s', error);
|
||||
logger.debug('Error: %o', error as object);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@@ -62,7 +62,7 @@ export class StateManager {
|
||||
emhttpLogger.error(
|
||||
'Failed to load state file: [%s]\nerror: %o',
|
||||
stateFile,
|
||||
error
|
||||
error as object
|
||||
);
|
||||
}
|
||||
} else {
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { CacheModule } from '@nestjs/cache-manager';
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import { describe, expect, it } from 'vitest';
|
||||
@@ -9,7 +10,7 @@ describe('Module Dependencies Integration', () => {
|
||||
let module;
|
||||
try {
|
||||
module = await Test.createTestingModule({
|
||||
imports: [RestModule],
|
||||
imports: [CacheModule.register({ isGlobal: true }), RestModule],
|
||||
}).compile();
|
||||
|
||||
expect(module).toBeDefined();
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { CacheModule } from '@nestjs/cache-manager';
|
||||
import { Module } from '@nestjs/common';
|
||||
import { APP_GUARD } from '@nestjs/core';
|
||||
import { ScheduleModule } from '@nestjs/schedule';
|
||||
import { ThrottlerModule } from '@nestjs/throttler';
|
||||
|
||||
import { AuthZGuard } from 'nest-authz';
|
||||
@@ -23,23 +24,25 @@ import { UnraidFileModifierModule } from '@app/unraid-api/unraid-file-modifier/u
|
||||
GlobalDepsModule,
|
||||
LegacyConfigModule,
|
||||
PubSubModule,
|
||||
ScheduleModule.forRoot(),
|
||||
LoggerModule.forRoot({
|
||||
pinoHttp: {
|
||||
logger: apiLogger,
|
||||
autoLogging: false,
|
||||
timestamp: false,
|
||||
...(LOG_LEVEL !== 'TRACE'
|
||||
? {
|
||||
serializers: {
|
||||
req: (req) => ({
|
||||
id: req.id,
|
||||
method: req.method,
|
||||
url: req.url,
|
||||
remoteAddress: req.remoteAddress,
|
||||
}),
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
serializers: {
|
||||
req: () => undefined,
|
||||
res: () => undefined,
|
||||
},
|
||||
formatters: {
|
||||
log: (obj) => {
|
||||
// Map NestJS context to Pino context field for pino-pretty
|
||||
if (obj.context && !obj.logger) {
|
||||
return { ...obj, logger: obj.context };
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
AuthModule,
|
||||
|
||||
@@ -2,15 +2,14 @@ import { Logger } from '@nestjs/common';
|
||||
import { readdir, readFile, writeFile } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
|
||||
import { Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { ensureDir, ensureDirSync } from 'fs-extra';
|
||||
import { AuthActionVerb } from 'nest-authz';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { environment } from '@app/environment.js';
|
||||
import { getters } from '@app/store/index.js';
|
||||
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
|
||||
import { ApiKey, ApiKeyWithSecret } from '@app/unraid-api/graph/resolvers/api-key/api-key.model.js';
|
||||
import { ApiKey } from '@app/unraid-api/graph/resolvers/api-key/api-key.model.js';
|
||||
|
||||
// Mock the store and its modules
|
||||
vi.mock('@app/store/index.js', () => ({
|
||||
@@ -48,28 +47,14 @@ describe('ApiKeyService', () => {
|
||||
|
||||
const mockApiKey: ApiKey = {
|
||||
id: 'test-api-id',
|
||||
key: 'test-secret-key',
|
||||
name: 'Test API Key',
|
||||
description: 'Test API Key Description',
|
||||
roles: [Role.GUEST],
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthActionVerb.READ],
|
||||
},
|
||||
],
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
const mockApiKeyWithSecret: ApiKeyWithSecret = {
|
||||
id: 'test-api-id',
|
||||
key: 'test-api-key',
|
||||
name: 'Test API Key',
|
||||
description: 'Test API Key Description',
|
||||
roles: [Role.GUEST],
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthActionVerb.READ],
|
||||
actions: [AuthAction.READ_ANY],
|
||||
},
|
||||
],
|
||||
createdAt: new Date().toISOString(),
|
||||
@@ -130,21 +115,23 @@ describe('ApiKeyService', () => {
|
||||
});
|
||||
|
||||
describe('create', () => {
|
||||
it('should create ApiKeyWithSecret with generated key', async () => {
|
||||
it('should create ApiKey with generated key', async () => {
|
||||
const saveSpy = vi.spyOn(apiKeyService, 'saveApiKey').mockResolvedValue();
|
||||
const { key, id, description, roles } = mockApiKeyWithSecret;
|
||||
const { id, description, roles } = mockApiKey;
|
||||
const name = 'Test API Key';
|
||||
|
||||
const result = await apiKeyService.create({ name, description: description ?? '', roles });
|
||||
|
||||
expect(result).toMatchObject({
|
||||
id,
|
||||
key,
|
||||
name: name,
|
||||
description,
|
||||
roles,
|
||||
createdAt: expect.any(String),
|
||||
});
|
||||
expect(result.key).toBeDefined();
|
||||
expect(typeof result.key).toBe('string');
|
||||
expect(result.key.length).toBeGreaterThan(0);
|
||||
|
||||
expect(saveSpy).toHaveBeenCalledWith(result);
|
||||
});
|
||||
@@ -177,8 +164,8 @@ describe('ApiKeyService', () => {
|
||||
describe('findAll', () => {
|
||||
it('should return all API keys', async () => {
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([
|
||||
mockApiKeyWithSecret,
|
||||
{ ...mockApiKeyWithSecret, id: 'second-id' },
|
||||
mockApiKey,
|
||||
{ ...mockApiKey, id: 'second-id' },
|
||||
]);
|
||||
await apiKeyService.onModuleInit();
|
||||
|
||||
@@ -191,7 +178,7 @@ describe('ApiKeyService', () => {
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthActionVerb.READ],
|
||||
actions: [AuthAction.READ_ANY],
|
||||
},
|
||||
],
|
||||
};
|
||||
@@ -202,7 +189,7 @@ describe('ApiKeyService', () => {
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthActionVerb.READ],
|
||||
actions: [AuthAction.READ_ANY],
|
||||
},
|
||||
],
|
||||
};
|
||||
@@ -219,17 +206,17 @@ describe('ApiKeyService', () => {
|
||||
|
||||
describe('findById', () => {
|
||||
it('should return API key by id when found', async () => {
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([mockApiKeyWithSecret]);
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([mockApiKey]);
|
||||
await apiKeyService.onModuleInit();
|
||||
|
||||
const result = await apiKeyService.findById(mockApiKeyWithSecret.id);
|
||||
const result = await apiKeyService.findById(mockApiKey.id);
|
||||
|
||||
expect(result).toMatchObject({ ...mockApiKey, createdAt: expect.any(String) });
|
||||
});
|
||||
|
||||
it('should return null if API key not found', async () => {
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([
|
||||
{ ...mockApiKeyWithSecret, id: 'different-id' },
|
||||
{ ...mockApiKey, id: 'different-id' },
|
||||
]);
|
||||
await apiKeyService.onModuleInit();
|
||||
|
||||
@@ -239,21 +226,21 @@ describe('ApiKeyService', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('findByIdWithSecret', () => {
|
||||
it('should return API key with secret when found', async () => {
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([mockApiKeyWithSecret]);
|
||||
describe('findById', () => {
|
||||
it('should return API key when found', async () => {
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([mockApiKey]);
|
||||
await apiKeyService.onModuleInit();
|
||||
|
||||
const result = await apiKeyService.findByIdWithSecret(mockApiKeyWithSecret.id);
|
||||
const result = await apiKeyService.findById(mockApiKey.id);
|
||||
|
||||
expect(result).toEqual(mockApiKeyWithSecret);
|
||||
expect(result).toEqual(mockApiKey);
|
||||
});
|
||||
|
||||
it('should return null when API key not found', async () => {
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([]);
|
||||
await apiKeyService.onModuleInit();
|
||||
|
||||
const result = await apiKeyService.findByIdWithSecret('non-existent-id');
|
||||
const result = await apiKeyService.findById('non-existent-id');
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
@@ -274,23 +261,20 @@ describe('ApiKeyService', () => {
|
||||
|
||||
describe('findByKey', () => {
|
||||
it('should return API key by key value when multiple keys exist', async () => {
|
||||
const differentKey = { ...mockApiKeyWithSecret, key: 'different-key' };
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([
|
||||
differentKey,
|
||||
mockApiKeyWithSecret,
|
||||
]);
|
||||
const differentKey = { ...mockApiKey, key: 'different-key' };
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([differentKey, mockApiKey]);
|
||||
|
||||
await apiKeyService.onModuleInit();
|
||||
|
||||
const result = await apiKeyService.findByKey(mockApiKeyWithSecret.key);
|
||||
const result = await apiKeyService.findByKey(mockApiKey.key);
|
||||
|
||||
expect(result).toEqual(mockApiKeyWithSecret);
|
||||
expect(result).toEqual(mockApiKey);
|
||||
});
|
||||
|
||||
it('should return null if key not found in any file', async () => {
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([
|
||||
{ ...mockApiKeyWithSecret, key: 'different-key-1' },
|
||||
{ ...mockApiKeyWithSecret, key: 'different-key-2' },
|
||||
{ ...mockApiKey, key: 'different-key-1' },
|
||||
{ ...mockApiKey, key: 'different-key-2' },
|
||||
]);
|
||||
await apiKeyService.onModuleInit();
|
||||
|
||||
@@ -314,21 +298,21 @@ describe('ApiKeyService', () => {
|
||||
it('should save API key to file', async () => {
|
||||
vi.mocked(writeFile).mockResolvedValue(undefined);
|
||||
|
||||
await apiKeyService.saveApiKey(mockApiKeyWithSecret);
|
||||
await apiKeyService.saveApiKey(mockApiKey);
|
||||
|
||||
const writeFileCalls = vi.mocked(writeFile).mock.calls;
|
||||
|
||||
expect(writeFileCalls.length).toBe(1);
|
||||
|
||||
const [filePath, fileContent] = writeFileCalls[0] ?? [];
|
||||
const expectedPath = join(mockBasePath, `${mockApiKeyWithSecret.id}.json`);
|
||||
const expectedPath = join(mockBasePath, `${mockApiKey.id}.json`);
|
||||
|
||||
expect(filePath).toBe(expectedPath);
|
||||
|
||||
if (typeof fileContent === 'string') {
|
||||
const savedApiKey = JSON.parse(fileContent);
|
||||
|
||||
expect(savedApiKey).toEqual(mockApiKeyWithSecret);
|
||||
expect(savedApiKey).toEqual(mockApiKey);
|
||||
} else {
|
||||
throw new Error('File content should be a string');
|
||||
}
|
||||
@@ -337,16 +321,16 @@ describe('ApiKeyService', () => {
|
||||
it('should throw GraphQLError on write error', async () => {
|
||||
vi.mocked(writeFile).mockRejectedValue(new Error('Write failed'));
|
||||
|
||||
await expect(apiKeyService.saveApiKey(mockApiKeyWithSecret)).rejects.toThrow(
|
||||
await expect(apiKeyService.saveApiKey(mockApiKey)).rejects.toThrow(
|
||||
'Failed to save API key: Write failed'
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw GraphQLError on invalid API key structure', async () => {
|
||||
const invalidApiKey = {
|
||||
...mockApiKeyWithSecret,
|
||||
...mockApiKey,
|
||||
name: '', // Invalid: name cannot be empty
|
||||
} as ApiKeyWithSecret;
|
||||
} as ApiKey;
|
||||
|
||||
await expect(apiKeyService.saveApiKey(invalidApiKey)).rejects.toThrow(
|
||||
'Failed to save API key: Invalid data structure'
|
||||
@@ -355,10 +339,10 @@ describe('ApiKeyService', () => {
|
||||
|
||||
it('should throw GraphQLError when roles and permissions array is empty', async () => {
|
||||
const invalidApiKey = {
|
||||
...mockApiKeyWithSecret,
|
||||
...mockApiKey,
|
||||
permissions: [],
|
||||
roles: [],
|
||||
} as ApiKeyWithSecret;
|
||||
} as ApiKey;
|
||||
|
||||
await expect(apiKeyService.saveApiKey(invalidApiKey)).rejects.toThrow(
|
||||
'At least one of permissions or roles must be specified'
|
||||
@@ -367,9 +351,9 @@ describe('ApiKeyService', () => {
|
||||
});
|
||||
|
||||
describe('update', () => {
|
||||
let updateMockApiKey: ApiKeyWithSecret;
|
||||
let updateMockApiKey: ApiKey;
|
||||
|
||||
beforeEach(() => {
|
||||
beforeEach(async () => {
|
||||
// Create a fresh copy of the mock data for update tests
|
||||
updateMockApiKey = {
|
||||
id: 'test-api-id',
|
||||
@@ -380,15 +364,17 @@ describe('ApiKeyService', () => {
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthActionVerb.READ],
|
||||
actions: [AuthAction.READ_ANY],
|
||||
},
|
||||
],
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([updateMockApiKey]);
|
||||
// Initialize the memoryApiKeys with the test data
|
||||
// The loadAllFromDisk mock will be called by onModuleInit
|
||||
vi.spyOn(apiKeyService, 'loadAllFromDisk').mockResolvedValue([{ ...updateMockApiKey }]);
|
||||
vi.spyOn(apiKeyService, 'saveApiKey').mockResolvedValue();
|
||||
apiKeyService.onModuleInit();
|
||||
await apiKeyService.onModuleInit();
|
||||
});
|
||||
|
||||
it('should update name and description', async () => {
|
||||
@@ -400,7 +386,6 @@ describe('ApiKeyService', () => {
|
||||
name: updatedName,
|
||||
description: updatedDescription,
|
||||
});
|
||||
|
||||
expect(result.name).toBe(updatedName);
|
||||
expect(result.description).toBe(updatedDescription);
|
||||
expect(result.roles).toEqual(updateMockApiKey.roles);
|
||||
@@ -427,7 +412,7 @@ describe('ApiKeyService', () => {
|
||||
const updatedPermissions = [
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthActionVerb.READ, AuthActionVerb.UPDATE],
|
||||
actions: [AuthAction.READ_ANY, AuthAction.UPDATE_ANY],
|
||||
},
|
||||
];
|
||||
|
||||
@@ -474,7 +459,7 @@ describe('ApiKeyService', () => {
|
||||
});
|
||||
|
||||
describe('loadAllFromDisk', () => {
|
||||
let loadMockApiKey: ApiKeyWithSecret;
|
||||
let loadMockApiKey: ApiKey;
|
||||
|
||||
beforeEach(() => {
|
||||
// Create a fresh copy of the mock data for loadAllFromDisk tests
|
||||
@@ -487,7 +472,7 @@ describe('ApiKeyService', () => {
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthActionVerb.READ],
|
||||
actions: [AuthAction.READ_ANY],
|
||||
},
|
||||
],
|
||||
createdAt: new Date().toISOString(),
|
||||
@@ -550,15 +535,62 @@ describe('ApiKeyService', () => {
|
||||
key: 'unique-key',
|
||||
});
|
||||
});
|
||||
|
||||
it('should normalize permission actions to lowercase when loading from disk', async () => {
|
||||
const apiKeyWithMixedCaseActions = {
|
||||
...loadMockApiKey,
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.DOCKER,
|
||||
actions: ['READ:ANY', 'Update:Any', 'create:any', 'DELETE:ANY'], // Mixed case actions
|
||||
},
|
||||
{
|
||||
resource: Resource.ARRAY,
|
||||
actions: ['Read:Any'], // Mixed case
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
vi.mocked(readdir).mockResolvedValue(['key1.json'] as any);
|
||||
vi.mocked(readFile).mockResolvedValueOnce(JSON.stringify(apiKeyWithMixedCaseActions));
|
||||
|
||||
const result = await apiKeyService.loadAllFromDisk();
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
// All actions should be normalized to lowercase
|
||||
expect(result[0].permissions[0].actions).toEqual([
|
||||
AuthAction.READ_ANY,
|
||||
AuthAction.UPDATE_ANY,
|
||||
AuthAction.CREATE_ANY,
|
||||
AuthAction.DELETE_ANY,
|
||||
]);
|
||||
expect(result[0].permissions[1].actions).toEqual([AuthAction.READ_ANY]);
|
||||
});
|
||||
|
||||
it('should normalize roles to uppercase when loading from disk', async () => {
|
||||
const apiKeyWithMixedCaseRoles = {
|
||||
...loadMockApiKey,
|
||||
roles: ['admin', 'Viewer', 'CONNECT'], // Mixed case roles
|
||||
};
|
||||
|
||||
vi.mocked(readdir).mockResolvedValue(['key1.json'] as any);
|
||||
vi.mocked(readFile).mockResolvedValueOnce(JSON.stringify(apiKeyWithMixedCaseRoles));
|
||||
|
||||
const result = await apiKeyService.loadAllFromDisk();
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
// All roles should be normalized to uppercase
|
||||
expect(result[0].roles).toEqual(['ADMIN', 'VIEWER', 'CONNECT']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadApiKeyFile', () => {
|
||||
it('should load and parse a valid API key file', async () => {
|
||||
vi.mocked(readFile).mockResolvedValue(JSON.stringify(mockApiKeyWithSecret));
|
||||
vi.mocked(readFile).mockResolvedValue(JSON.stringify(mockApiKey));
|
||||
|
||||
const result = await apiKeyService['loadApiKeyFile']('test.json');
|
||||
|
||||
expect(result).toEqual(mockApiKeyWithSecret);
|
||||
expect(result).toEqual(mockApiKey);
|
||||
expect(readFile).toHaveBeenCalledWith(join(mockBasePath, 'test.json'), 'utf8');
|
||||
});
|
||||
|
||||
@@ -592,7 +624,7 @@ describe('ApiKeyService', () => {
|
||||
expect.stringContaining('Error validating API key file test.json')
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining('An instance of ApiKeyWithSecret has failed the validation')
|
||||
expect.stringContaining('An instance of ApiKey has failed the validation')
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(expect.stringContaining('property key'));
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(expect.stringContaining('property id'));
|
||||
@@ -603,5 +635,150 @@ describe('ApiKeyService', () => {
|
||||
expect.stringContaining('property permissions')
|
||||
);
|
||||
});
|
||||
|
||||
it('should normalize legacy action formats when loading API keys', async () => {
|
||||
const legacyApiKey = {
|
||||
...mockApiKey,
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.DOCKER,
|
||||
actions: ['create', 'READ', 'Update', 'DELETE'], // Mixed case legacy verbs
|
||||
},
|
||||
{
|
||||
resource: Resource.VMS,
|
||||
actions: ['READ_ANY', 'UPDATE_OWN'], // GraphQL enum style
|
||||
},
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: ['read:own', 'update:any'], // Casbin colon format
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
vi.mocked(readFile).mockResolvedValue(JSON.stringify(legacyApiKey));
|
||||
|
||||
const result = await apiKeyService['loadApiKeyFile']('legacy.json');
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.permissions).toEqual([
|
||||
{
|
||||
resource: Resource.DOCKER,
|
||||
actions: [
|
||||
AuthAction.CREATE_ANY,
|
||||
AuthAction.READ_ANY,
|
||||
AuthAction.UPDATE_ANY,
|
||||
AuthAction.DELETE_ANY,
|
||||
],
|
||||
},
|
||||
{
|
||||
resource: Resource.VMS,
|
||||
actions: [AuthAction.READ_ANY, AuthAction.UPDATE_OWN],
|
||||
},
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthAction.READ_OWN, AuthAction.UPDATE_ANY],
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('convertRolesStringArrayToRoles', () => {
|
||||
beforeEach(async () => {
|
||||
vi.mocked(getters.paths).mockReturnValue({
|
||||
'auth-keys': mockBasePath,
|
||||
} as ReturnType<typeof getters.paths>);
|
||||
|
||||
// Create a fresh mock logger for each test
|
||||
mockLogger = {
|
||||
log: vi.fn(),
|
||||
error: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
verbose: vi.fn(),
|
||||
};
|
||||
|
||||
apiKeyService = new ApiKeyService();
|
||||
// Replace the logger with our mock
|
||||
(apiKeyService as any).logger = mockLogger;
|
||||
});
|
||||
|
||||
it('should convert uppercase role strings to Role enum values', () => {
|
||||
const roles = ['ADMIN', 'CONNECT', 'VIEWER'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.VIEWER]);
|
||||
});
|
||||
|
||||
it('should convert lowercase role strings to Role enum values', () => {
|
||||
const roles = ['admin', 'connect', 'guest'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.GUEST]);
|
||||
});
|
||||
|
||||
it('should convert mixed case role strings to Role enum values', () => {
|
||||
const roles = ['Admin', 'CoNnEcT', 'ViEwEr'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.VIEWER]);
|
||||
});
|
||||
|
||||
it('should handle roles with whitespace', () => {
|
||||
const roles = [' ADMIN ', ' CONNECT ', 'VIEWER '];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.VIEWER]);
|
||||
});
|
||||
|
||||
it('should filter out invalid roles and warn', () => {
|
||||
const roles = ['ADMIN', 'INVALID_ROLE', 'VIEWER', 'ANOTHER_INVALID'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.VIEWER]);
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
'Ignoring invalid roles: INVALID_ROLE, ANOTHER_INVALID'
|
||||
);
|
||||
});
|
||||
|
||||
it('should return empty array when all roles are invalid', () => {
|
||||
const roles = ['INVALID1', 'INVALID2', 'INVALID3'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([]);
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
'Ignoring invalid roles: INVALID1, INVALID2, INVALID3'
|
||||
);
|
||||
});
|
||||
|
||||
it('should return empty array for empty input', () => {
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles([]);
|
||||
|
||||
expect(result).toEqual([]);
|
||||
expect(mockLogger.warn).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle all valid Role enum values', () => {
|
||||
const roles = Object.values(Role);
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual(Object.values(Role));
|
||||
expect(mockLogger.warn).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should deduplicate roles', () => {
|
||||
const roles = ['ADMIN', 'admin', 'ADMIN', 'VIEWER', 'viewer'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
// Note: Current implementation doesn't deduplicate, but this test documents the behavior
|
||||
expect(result).toEqual([Role.ADMIN, Role.ADMIN, Role.ADMIN, Role.VIEWER, Role.VIEWER]);
|
||||
});
|
||||
|
||||
it('should handle mixed valid and invalid roles correctly', () => {
|
||||
const roles = ['ADMIN', 'invalid', 'CONNECT', 'bad_role', 'GUEST', 'VIEWER'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.GUEST, Role.VIEWER]);
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith('Ignoring invalid roles: invalid, bad_role');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -3,12 +3,12 @@ import crypto from 'crypto';
|
||||
import { readdir, readFile, unlink, writeFile } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
|
||||
import { Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { normalizeLegacyActions } from '@unraid/shared/util/permissions.js';
|
||||
import { watch } from 'chokidar';
|
||||
import { ValidationError } from 'class-validator';
|
||||
import { ensureDirSync } from 'fs-extra';
|
||||
import { GraphQLError } from 'graphql';
|
||||
import { AuthActionVerb } from 'nest-authz';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
|
||||
import { environment } from '@app/environment.js';
|
||||
@@ -16,7 +16,6 @@ import { getters } from '@app/store/index.js';
|
||||
import {
|
||||
AddPermissionInput,
|
||||
ApiKey,
|
||||
ApiKeyWithSecret,
|
||||
Permission,
|
||||
} from '@app/unraid-api/graph/resolvers/api-key/api-key.model.js';
|
||||
import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';
|
||||
@@ -26,7 +25,7 @@ import { batchProcess } from '@app/utils.js';
|
||||
export class ApiKeyService implements OnModuleInit {
|
||||
private readonly logger = new Logger(ApiKeyService.name);
|
||||
protected readonly basePath: string;
|
||||
protected memoryApiKeys: Array<ApiKeyWithSecret> = [];
|
||||
protected memoryApiKeys: Array<ApiKey> = [];
|
||||
private static readonly validRoles: Set<Role> = new Set(Object.values(Role));
|
||||
|
||||
constructor() {
|
||||
@@ -36,23 +35,31 @@ export class ApiKeyService implements OnModuleInit {
|
||||
|
||||
async onModuleInit() {
|
||||
this.memoryApiKeys = await this.loadAllFromDisk();
|
||||
await this.cleanupLegacyInternalKeys();
|
||||
if (environment.IS_MAIN_PROCESS) {
|
||||
this.setupWatch();
|
||||
}
|
||||
}
|
||||
|
||||
public convertApiKeyWithSecretToApiKey(key: ApiKeyWithSecret): ApiKey {
|
||||
const { key: _, ...rest } = key;
|
||||
return rest;
|
||||
private async cleanupLegacyInternalKeys() {
|
||||
const legacyNames = ['CliInternal', 'ConnectInternal'];
|
||||
const keysToDelete = this.memoryApiKeys.filter((key) => legacyNames.includes(key.name));
|
||||
|
||||
if (keysToDelete.length > 0) {
|
||||
try {
|
||||
await this.deleteApiKeys(keysToDelete.map((key) => key.id));
|
||||
this.logger.log(`Cleaned up ${keysToDelete.length} legacy internal keys`);
|
||||
} catch (error) {
|
||||
this.logger.debug(
|
||||
error,
|
||||
`Failed to delete legacy internal keys: ${keysToDelete.map((key) => key.name).join(', ')}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async findAll(): Promise<ApiKey[]> {
|
||||
return Promise.all(
|
||||
this.memoryApiKeys.map(async (key) => {
|
||||
const keyWithoutSecret = this.convertApiKeyWithSecretToApiKey(key);
|
||||
return keyWithoutSecret;
|
||||
})
|
||||
);
|
||||
return this.memoryApiKeys;
|
||||
}
|
||||
|
||||
private setupWatch() {
|
||||
@@ -76,17 +83,18 @@ export class ApiKeyService implements OnModuleInit {
|
||||
public getAllValidPermissions(): Permission[] {
|
||||
return Object.values(Resource).map((res) => ({
|
||||
resource: res,
|
||||
actions: Object.values(AuthActionVerb),
|
||||
actions: Object.values(AuthAction),
|
||||
}));
|
||||
}
|
||||
|
||||
public convertPermissionsStringArrayToPermissions(permissions: string[]): Permission[] {
|
||||
return permissions.reduce<Array<Permission>>((acc, permission) => {
|
||||
const [resource, action] = permission.split(':');
|
||||
const [resource, ...actionParts] = permission.split(':');
|
||||
const action = actionParts.join(':'); // Handle actions like "read:any"
|
||||
const validatedResource = Resource[resource.toUpperCase() as keyof typeof Resource] ?? null;
|
||||
// Pull the actual enum value from the graphql schema
|
||||
const validatedAction =
|
||||
AuthActionVerb[action.toUpperCase() as keyof typeof AuthActionVerb] ?? null;
|
||||
AuthAction[action.toUpperCase().replace(':', '_') as keyof typeof AuthAction] ?? null;
|
||||
if (validatedAction && validatedResource) {
|
||||
const existingEntry = acc.find((p) => p.resource === validatedResource);
|
||||
if (existingEntry) {
|
||||
@@ -102,9 +110,25 @@ export class ApiKeyService implements OnModuleInit {
|
||||
}
|
||||
|
||||
public convertRolesStringArrayToRoles(roles: string[]): Role[] {
|
||||
return roles
|
||||
.map((roleStr) => Role[roleStr.trim().toUpperCase() as keyof typeof Role])
|
||||
.filter(Boolean);
|
||||
const validRoles: Role[] = [];
|
||||
const invalidRoles: string[] = [];
|
||||
|
||||
for (const roleStr of roles) {
|
||||
const upperRole = roleStr.trim().toUpperCase();
|
||||
const role = Role[upperRole as keyof typeof Role];
|
||||
|
||||
if (role && ApiKeyService.validRoles.has(role)) {
|
||||
validRoles.push(role);
|
||||
} else {
|
||||
invalidRoles.push(roleStr);
|
||||
}
|
||||
}
|
||||
|
||||
if (invalidRoles.length > 0) {
|
||||
this.logger.warn(`Ignoring invalid roles: ${invalidRoles.join(', ')}`);
|
||||
}
|
||||
|
||||
return validRoles;
|
||||
}
|
||||
|
||||
async create({
|
||||
@@ -119,7 +143,7 @@ export class ApiKeyService implements OnModuleInit {
|
||||
roles?: Role[];
|
||||
permissions?: Permission[] | AddPermissionInput[];
|
||||
overwrite?: boolean;
|
||||
}): Promise<ApiKeyWithSecret> {
|
||||
}): Promise<ApiKey> {
|
||||
const trimmedName = name?.trim();
|
||||
const sanitizedName = this.sanitizeName(trimmedName);
|
||||
|
||||
@@ -139,7 +163,7 @@ export class ApiKeyService implements OnModuleInit {
|
||||
if (!overwrite && existingKey) {
|
||||
return existingKey;
|
||||
}
|
||||
const apiKey: Partial<ApiKeyWithSecret> = {
|
||||
const apiKey: Partial<ApiKey> = {
|
||||
id: uuidv4(),
|
||||
key: this.generateApiKey(),
|
||||
name: sanitizedName,
|
||||
@@ -152,18 +176,18 @@ export class ApiKeyService implements OnModuleInit {
|
||||
// Update createdAt date
|
||||
apiKey.createdAt = new Date().toISOString();
|
||||
|
||||
await this.saveApiKey(apiKey as ApiKeyWithSecret);
|
||||
await this.saveApiKey(apiKey as ApiKey);
|
||||
|
||||
return apiKey as ApiKeyWithSecret;
|
||||
return apiKey as ApiKey;
|
||||
}
|
||||
|
||||
async loadAllFromDisk(): Promise<ApiKeyWithSecret[]> {
|
||||
async loadAllFromDisk(): Promise<ApiKey[]> {
|
||||
const files = await readdir(this.basePath).catch((error) => {
|
||||
this.logger.error(`Failed to read API key directory: ${error}`);
|
||||
throw new Error('Failed to list API keys');
|
||||
});
|
||||
|
||||
const apiKeys: ApiKeyWithSecret[] = [];
|
||||
const apiKeys: ApiKey[] = [];
|
||||
const jsonFiles = files.filter((file) => file.includes('.json'));
|
||||
|
||||
for (const file of jsonFiles) {
|
||||
@@ -186,7 +210,7 @@ export class ApiKeyService implements OnModuleInit {
|
||||
* @param file The file to load
|
||||
* @returns The API key with secret
|
||||
*/
|
||||
private async loadApiKeyFile(file: string): Promise<ApiKeyWithSecret | null> {
|
||||
private async loadApiKeyFile(file: string): Promise<ApiKey | null> {
|
||||
try {
|
||||
const content = await readFile(join(this.basePath, file), 'utf8');
|
||||
|
||||
@@ -196,7 +220,17 @@ export class ApiKeyService implements OnModuleInit {
|
||||
if (parsedContent.roles) {
|
||||
parsedContent.roles = parsedContent.roles.map((role: string) => role.toUpperCase());
|
||||
}
|
||||
return await validateObject(ApiKeyWithSecret, parsedContent);
|
||||
|
||||
// Normalize permission actions to AuthAction enum values
|
||||
// Uses shared helper to handle all legacy formats
|
||||
if (parsedContent.permissions) {
|
||||
parsedContent.permissions = parsedContent.permissions.map((permission: any) => ({
|
||||
...permission,
|
||||
actions: normalizeLegacyActions(permission.actions || []),
|
||||
}));
|
||||
}
|
||||
|
||||
return await validateObject(ApiKey, parsedContent);
|
||||
} catch (error) {
|
||||
if (error instanceof SyntaxError) {
|
||||
this.logger.error(`Corrupted key file: ${file}`);
|
||||
@@ -216,12 +250,7 @@ export class ApiKeyService implements OnModuleInit {
|
||||
|
||||
async findById(id: string): Promise<ApiKey | null> {
|
||||
try {
|
||||
const key = this.findByField('id', id);
|
||||
|
||||
if (key) {
|
||||
return this.convertApiKeyWithSecretToApiKey(key);
|
||||
}
|
||||
return null;
|
||||
return this.findByField('id', id);
|
||||
} catch (error) {
|
||||
if (error instanceof ValidationError) {
|
||||
this.logApiKeyValidationError(id, error);
|
||||
@@ -231,17 +260,13 @@ export class ApiKeyService implements OnModuleInit {
|
||||
}
|
||||
}
|
||||
|
||||
public findByIdWithSecret(id: string): ApiKeyWithSecret | null {
|
||||
return this.findByField('id', id);
|
||||
}
|
||||
|
||||
public findByField(field: keyof ApiKeyWithSecret, value: string): ApiKeyWithSecret | null {
|
||||
public findByField(field: keyof ApiKey, value: string): ApiKey | null {
|
||||
if (!value) return null;
|
||||
|
||||
return this.memoryApiKeys.find((k) => k[field] === value) ?? null;
|
||||
}
|
||||
|
||||
findByKey(key: string): ApiKeyWithSecret | null {
|
||||
findByKey(key: string): ApiKey | null {
|
||||
return this.findByField('key', key);
|
||||
}
|
||||
|
||||
@@ -254,9 +279,9 @@ export class ApiKeyService implements OnModuleInit {
|
||||
Errors: ${JSON.stringify(error.constraints, null, 2)}`);
|
||||
}
|
||||
|
||||
public async saveApiKey(apiKey: ApiKeyWithSecret): Promise<void> {
|
||||
public async saveApiKey(apiKey: ApiKey): Promise<void> {
|
||||
try {
|
||||
const validatedApiKey = await validateObject(ApiKeyWithSecret, apiKey);
|
||||
const validatedApiKey = await validateObject(ApiKey, apiKey);
|
||||
if (!validatedApiKey.permissions?.length && !validatedApiKey.roles?.length) {
|
||||
throw new GraphQLError('At least one of permissions or roles must be specified');
|
||||
}
|
||||
@@ -266,7 +291,7 @@ export class ApiKeyService implements OnModuleInit {
|
||||
.reduce((acc, key) => {
|
||||
acc[key] = validatedApiKey[key];
|
||||
return acc;
|
||||
}, {} as ApiKeyWithSecret);
|
||||
}, {} as ApiKey);
|
||||
|
||||
await writeFile(
|
||||
join(this.basePath, `${validatedApiKey.id}.json`),
|
||||
@@ -334,8 +359,8 @@ export class ApiKeyService implements OnModuleInit {
|
||||
description?: string;
|
||||
roles?: Role[];
|
||||
permissions?: Permission[] | AddPermissionInput[];
|
||||
}): Promise<ApiKeyWithSecret> {
|
||||
const apiKey = this.findByIdWithSecret(id);
|
||||
}): Promise<ApiKey> {
|
||||
const apiKey = await this.findById(id);
|
||||
if (!apiKey) {
|
||||
throw new GraphQLError('API key not found');
|
||||
}
|
||||
@@ -345,13 +370,15 @@ export class ApiKeyService implements OnModuleInit {
|
||||
if (description !== undefined) {
|
||||
apiKey.description = description;
|
||||
}
|
||||
if (roles) {
|
||||
if (roles !== undefined) {
|
||||
// Handle both empty array (to clear roles) and populated array
|
||||
if (roles.some((role) => !ApiKeyService.validRoles.has(role))) {
|
||||
throw new GraphQLError('Invalid role specified');
|
||||
}
|
||||
apiKey.roles = roles;
|
||||
}
|
||||
if (permissions) {
|
||||
if (permissions !== undefined) {
|
||||
// Handle both empty array (to clear permissions) and populated array
|
||||
apiKey.permissions = permissions;
|
||||
}
|
||||
await this.saveApiKey(apiKey);
|
||||
|
||||
@@ -11,13 +11,19 @@ import { BASE_POLICY, CASBIN_MODEL } from '@app/unraid-api/auth/casbin/index.js'
|
||||
import { CookieService, SESSION_COOKIE_CONFIG } from '@app/unraid-api/auth/cookie.service.js';
|
||||
import { UserCookieStrategy } from '@app/unraid-api/auth/cookie.strategy.js';
|
||||
import { ServerHeaderStrategy } from '@app/unraid-api/auth/header.strategy.js';
|
||||
import { AdminKeyService } from '@app/unraid-api/cli/admin-key.service.js';
|
||||
import { LocalSessionLifecycleService } from '@app/unraid-api/auth/local-session-lifecycle.service.js';
|
||||
import { LocalSessionService } from '@app/unraid-api/auth/local-session.service.js';
|
||||
import { LocalSessionStrategy } from '@app/unraid-api/auth/local-session.strategy.js';
|
||||
import { getRequest } from '@app/utils.js';
|
||||
|
||||
@Module({
|
||||
imports: [
|
||||
PassportModule.register({
|
||||
defaultStrategy: [ServerHeaderStrategy.key, UserCookieStrategy.key],
|
||||
defaultStrategy: [
|
||||
ServerHeaderStrategy.key,
|
||||
LocalSessionStrategy.key,
|
||||
UserCookieStrategy.key,
|
||||
],
|
||||
}),
|
||||
CasbinModule,
|
||||
AuthZModule.register({
|
||||
@@ -51,10 +57,12 @@ import { getRequest } from '@app/utils.js';
|
||||
providers: [
|
||||
AuthService,
|
||||
ApiKeyService,
|
||||
AdminKeyService,
|
||||
ServerHeaderStrategy,
|
||||
LocalSessionStrategy,
|
||||
UserCookieStrategy,
|
||||
CookieService,
|
||||
LocalSessionService,
|
||||
LocalSessionLifecycleService,
|
||||
{
|
||||
provide: SESSION_COOKIE_CONFIG,
|
||||
useValue: CookieService.defaultOpts(),
|
||||
@@ -65,8 +73,11 @@ import { getRequest } from '@app/utils.js';
|
||||
ApiKeyService,
|
||||
PassportModule,
|
||||
ServerHeaderStrategy,
|
||||
LocalSessionStrategy,
|
||||
UserCookieStrategy,
|
||||
CookieService,
|
||||
LocalSessionService,
|
||||
LocalSessionLifecycleService,
|
||||
AuthZModule,
|
||||
],
|
||||
})
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
import { UnauthorizedException } from '@nestjs/common';
|
||||
|
||||
import { Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { newEnforcer } from 'casbin';
|
||||
import { AuthActionVerb, AuthZService } from 'nest-authz';
|
||||
import { AuthZService } from 'nest-authz';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
|
||||
import { AuthService } from '@app/unraid-api/auth/auth.service.js';
|
||||
import { CookieService } from '@app/unraid-api/auth/cookie.service.js';
|
||||
import { ApiKey, ApiKeyWithSecret } from '@app/unraid-api/graph/resolvers/api-key/api-key.model.js';
|
||||
import { LocalSessionService } from '@app/unraid-api/auth/local-session.service.js';
|
||||
import { ApiKey } from '@app/unraid-api/graph/resolvers/api-key/api-key.model.js';
|
||||
import { UserAccount } from '@app/unraid-api/graph/user/user.model.js';
|
||||
import { FastifyRequest } from '@app/unraid-api/types/fastify.js';
|
||||
|
||||
@@ -17,17 +18,9 @@ describe('AuthService', () => {
|
||||
let apiKeyService: ApiKeyService;
|
||||
let authzService: AuthZService;
|
||||
let cookieService: CookieService;
|
||||
let localSessionService: LocalSessionService;
|
||||
|
||||
const mockApiKey: ApiKey = {
|
||||
id: '10f356da-1e9e-43b8-9028-a26a645539a6',
|
||||
name: 'Test API Key',
|
||||
description: 'Test API Key Description',
|
||||
roles: [Role.GUEST, Role.CONNECT],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
|
||||
const mockApiKeyWithSecret: ApiKeyWithSecret = {
|
||||
id: 'test-api-id',
|
||||
key: 'test-api-key',
|
||||
name: 'Test API Key',
|
||||
@@ -36,7 +29,7 @@ describe('AuthService', () => {
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.CONNECT,
|
||||
actions: [AuthActionVerb.READ.toUpperCase()],
|
||||
actions: [AuthAction.READ_ANY],
|
||||
},
|
||||
],
|
||||
createdAt: new Date().toISOString(),
|
||||
@@ -64,7 +57,10 @@ describe('AuthService', () => {
|
||||
apiKeyService = new ApiKeyService();
|
||||
authzService = new AuthZService(enforcer);
|
||||
cookieService = new CookieService();
|
||||
authService = new AuthService(cookieService, apiKeyService, authzService);
|
||||
localSessionService = {
|
||||
validateLocalSession: vi.fn(),
|
||||
} as any;
|
||||
authService = new AuthService(cookieService, apiKeyService, localSessionService, authzService);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
@@ -98,6 +94,43 @@ describe('AuthService', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should validate API key with only permissions (no roles)', async () => {
|
||||
const apiKeyWithOnlyPermissions: ApiKey = {
|
||||
...mockApiKey,
|
||||
roles: [], // No roles, only permissions
|
||||
permissions: [
|
||||
{
|
||||
resource: Resource.DOCKER,
|
||||
actions: [AuthAction.READ_ANY, AuthAction.UPDATE_ANY],
|
||||
},
|
||||
{
|
||||
resource: Resource.VMS,
|
||||
actions: [AuthAction.READ_ANY],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
vi.spyOn(apiKeyService, 'findByKey').mockResolvedValue(apiKeyWithOnlyPermissions);
|
||||
vi.spyOn(authService, 'syncApiKeyRoles').mockResolvedValue(undefined);
|
||||
vi.spyOn(authService, 'syncApiKeyPermissions').mockResolvedValue(undefined);
|
||||
vi.spyOn(authzService, 'getRolesForUser').mockResolvedValue([]);
|
||||
|
||||
const result = await authService.validateApiKeyCasbin('test-api-key');
|
||||
|
||||
expect(result).toEqual({
|
||||
id: apiKeyWithOnlyPermissions.id,
|
||||
name: apiKeyWithOnlyPermissions.name,
|
||||
description: apiKeyWithOnlyPermissions.description,
|
||||
roles: [],
|
||||
permissions: apiKeyWithOnlyPermissions.permissions,
|
||||
});
|
||||
expect(authService.syncApiKeyRoles).toHaveBeenCalledWith(apiKeyWithOnlyPermissions.id, []);
|
||||
expect(authService.syncApiKeyPermissions).toHaveBeenCalledWith(
|
||||
apiKeyWithOnlyPermissions.id,
|
||||
apiKeyWithOnlyPermissions.permissions
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw UnauthorizedException when session user is missing', async () => {
|
||||
vi.spyOn(cookieService, 'hasValidAuthCookie').mockResolvedValue(true);
|
||||
vi.spyOn(authService, 'getSessionUser').mockResolvedValue(null as unknown as UserAccount);
|
||||
@@ -195,10 +228,6 @@ describe('AuthService', () => {
|
||||
};
|
||||
|
||||
vi.spyOn(apiKeyService, 'findById').mockResolvedValue(mockApiKeyWithoutRole);
|
||||
vi.spyOn(apiKeyService, 'findByIdWithSecret').mockResolvedValue({
|
||||
...mockApiKeyWithSecret,
|
||||
roles: [Role.ADMIN],
|
||||
});
|
||||
vi.spyOn(apiKeyService, 'saveApiKey').mockResolvedValue();
|
||||
vi.spyOn(authzService, 'addRoleForUser').mockResolvedValue(true);
|
||||
|
||||
@@ -206,9 +235,8 @@ describe('AuthService', () => {
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(apiKeyService.findById).toHaveBeenCalledWith(apiKeyId);
|
||||
expect(apiKeyService.findByIdWithSecret).toHaveBeenCalledWith(apiKeyId);
|
||||
expect(apiKeyService.saveApiKey).toHaveBeenCalledWith({
|
||||
...mockApiKeyWithSecret,
|
||||
...mockApiKeyWithoutRole,
|
||||
roles: [Role.ADMIN, role],
|
||||
});
|
||||
expect(authzService.addRoleForUser).toHaveBeenCalledWith(apiKeyId, role);
|
||||
@@ -226,13 +254,8 @@ describe('AuthService', () => {
|
||||
describe('removeRoleFromApiKey', () => {
|
||||
it('should remove role from API key', async () => {
|
||||
const apiKey = { ...mockApiKey, roles: [Role.ADMIN, Role.GUEST] };
|
||||
const apiKeyWithSecret = {
|
||||
...mockApiKeyWithSecret,
|
||||
roles: [Role.ADMIN, Role.GUEST],
|
||||
};
|
||||
|
||||
vi.spyOn(apiKeyService, 'findById').mockResolvedValue(apiKey);
|
||||
vi.spyOn(apiKeyService, 'findByIdWithSecret').mockResolvedValue(apiKeyWithSecret);
|
||||
vi.spyOn(apiKeyService, 'saveApiKey').mockResolvedValue();
|
||||
vi.spyOn(authzService, 'deleteRoleForUser').mockResolvedValue(true);
|
||||
|
||||
@@ -240,9 +263,8 @@ describe('AuthService', () => {
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(apiKeyService.findById).toHaveBeenCalledWith(apiKey.id);
|
||||
expect(apiKeyService.findByIdWithSecret).toHaveBeenCalledWith(apiKey.id);
|
||||
expect(apiKeyService.saveApiKey).toHaveBeenCalledWith({
|
||||
...apiKeyWithSecret,
|
||||
...apiKey,
|
||||
roles: [Role.GUEST],
|
||||
});
|
||||
expect(authzService.deleteRoleForUser).toHaveBeenCalledWith(apiKey.id, Role.ADMIN);
|
||||
@@ -256,4 +278,229 @@ describe('AuthService', () => {
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('VIEWER role API_KEY access restriction', () => {
|
||||
it('should deny VIEWER role access to API_KEY resource', async () => {
|
||||
// Test that VIEWER role cannot access API_KEY resource
|
||||
const mockCasbinPermissions = Object.values(Resource)
|
||||
.filter((resource) => resource !== Resource.API_KEY)
|
||||
.map((resource) => ['VIEWER', resource, AuthAction.READ_ANY]);
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.VIEWER);
|
||||
|
||||
// VIEWER should have read access to all resources EXCEPT API_KEY
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBeGreaterThan(0);
|
||||
|
||||
// Should NOT have API_KEY in the permissions
|
||||
expect(result.has(Resource.API_KEY)).toBe(false);
|
||||
|
||||
// Should have read access to other resources
|
||||
expect(result.get(Resource.DOCKER)).toEqual([AuthAction.READ_ANY]);
|
||||
expect(result.get(Resource.ARRAY)).toEqual([AuthAction.READ_ANY]);
|
||||
expect(result.get(Resource.CONFIG)).toEqual([AuthAction.READ_ANY]);
|
||||
expect(result.get(Resource.ME)).toEqual([AuthAction.READ_ANY]);
|
||||
});
|
||||
|
||||
it('should allow ADMIN role access to API_KEY resource', async () => {
|
||||
// Test that ADMIN role CAN access API_KEY resource
|
||||
const mockCasbinPermissions = [
|
||||
['ADMIN', '*', '*'], // Admin has wildcard access
|
||||
];
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
// ADMIN should have access to API_KEY through wildcard
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.has(Resource.API_KEY)).toBe(true);
|
||||
expect(result.get(Resource.API_KEY)).toContain(AuthAction.CREATE_ANY);
|
||||
expect(result.get(Resource.API_KEY)).toContain(AuthAction.READ_ANY);
|
||||
expect(result.get(Resource.API_KEY)).toContain(AuthAction.UPDATE_ANY);
|
||||
expect(result.get(Resource.API_KEY)).toContain(AuthAction.DELETE_ANY);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getImplicitPermissionsForRole', () => {
|
||||
it('should return permissions for a role', async () => {
|
||||
const mockCasbinPermissions = [
|
||||
['ADMIN', 'DOCKER', 'READ'],
|
||||
['ADMIN', 'DOCKER', 'UPDATE'],
|
||||
['ADMIN', 'VMS', 'READ'],
|
||||
];
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBe(2);
|
||||
expect(result.get(Resource.DOCKER)).toEqual([AuthAction.READ_ANY, AuthAction.UPDATE_ANY]);
|
||||
expect(result.get(Resource.VMS)).toEqual([AuthAction.READ_ANY]);
|
||||
});
|
||||
|
||||
it('should handle wildcard permissions for admin role', async () => {
|
||||
const mockCasbinPermissions = [
|
||||
['ADMIN', '*', '*'],
|
||||
['ADMIN', 'ME', 'READ'], // Inherited from GUEST
|
||||
];
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBeGreaterThan(0);
|
||||
// Should have expanded CRUD actions with proper format for all resources
|
||||
expect(result.get(Resource.DOCKER)).toContain(AuthAction.CREATE_ANY);
|
||||
expect(result.get(Resource.DOCKER)).toContain(AuthAction.READ_ANY);
|
||||
expect(result.get(Resource.DOCKER)).toContain(AuthAction.UPDATE_ANY);
|
||||
expect(result.get(Resource.DOCKER)).toContain(AuthAction.DELETE_ANY);
|
||||
expect(result.get(Resource.VMS)).toContain(AuthAction.CREATE_ANY);
|
||||
expect(result.get(Resource.VMS)).toContain(AuthAction.READ_ANY);
|
||||
expect(result.get(Resource.VMS)).toContain(AuthAction.UPDATE_ANY);
|
||||
expect(result.get(Resource.VMS)).toContain(AuthAction.DELETE_ANY);
|
||||
expect(result.get(Resource.ME)).toContain(AuthAction.READ_ANY);
|
||||
expect(result.get(Resource.ME)).toContain(AuthAction.CREATE_ANY); // Also gets CRUD from wildcard
|
||||
expect(result.has('*' as any)).toBe(false); // Still shouldn't have literal wildcard
|
||||
});
|
||||
|
||||
it('should handle connect role with wildcard resource and specific action', async () => {
|
||||
const mockCasbinPermissions = [
|
||||
['CONNECT', '*', 'READ'],
|
||||
['CONNECT', 'CONNECT__REMOTE_ACCESS', 'UPDATE'],
|
||||
['CONNECT', 'ME', 'READ'], // Inherited from GUEST
|
||||
];
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.CONNECT);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBeGreaterThan(0);
|
||||
// All resources should have READ
|
||||
expect(result.get(Resource.DOCKER)).toContain(AuthAction.READ_ANY);
|
||||
expect(result.get(Resource.VMS)).toContain(AuthAction.READ_ANY);
|
||||
expect(result.get(Resource.ARRAY)).toContain(AuthAction.READ_ANY);
|
||||
// CONNECT__REMOTE_ACCESS should have both READ and UPDATE
|
||||
expect(result.get(Resource.CONNECT__REMOTE_ACCESS)).toContain(AuthAction.READ_ANY);
|
||||
expect(result.get(Resource.CONNECT__REMOTE_ACCESS)).toContain(AuthAction.UPDATE_ANY);
|
||||
});
|
||||
|
||||
it('should expand resource-specific wildcard actions to CRUD', async () => {
|
||||
const mockCasbinPermissions = [
|
||||
['DOCKER_MANAGER', 'DOCKER', '*'],
|
||||
['DOCKER_MANAGER', 'ARRAY', 'READ'],
|
||||
];
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
// Docker should have all CRUD actions with proper format
|
||||
expect(result.get(Resource.DOCKER)).toEqual(
|
||||
expect.arrayContaining([
|
||||
AuthAction.CREATE_ANY,
|
||||
AuthAction.READ_ANY,
|
||||
AuthAction.UPDATE_ANY,
|
||||
AuthAction.DELETE_ANY,
|
||||
])
|
||||
);
|
||||
// Array should only have READ
|
||||
expect(result.get(Resource.ARRAY)).toEqual([AuthAction.READ_ANY]);
|
||||
});
|
||||
|
||||
it('should skip invalid resources', async () => {
|
||||
const mockCasbinPermissions = [
|
||||
['ADMIN', 'INVALID_RESOURCE', 'READ'],
|
||||
['ADMIN', 'DOCKER', 'UPDATE'],
|
||||
['ADMIN', '', 'READ'],
|
||||
] as string[][];
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBe(1);
|
||||
expect(result.get(Resource.DOCKER)).toEqual([AuthAction.UPDATE_ANY]);
|
||||
});
|
||||
|
||||
it('should handle empty permissions', async () => {
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue([]);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle malformed permission entries', async () => {
|
||||
const mockCasbinPermissions = [
|
||||
['ADMIN'], // Too short
|
||||
['ADMIN', 'DOCKER'], // Missing action
|
||||
['ADMIN', 'DOCKER', 'READ', 'EXTRA'], // Extra fields are ok
|
||||
['ADMIN', 'VMS', 'UPDATE'],
|
||||
];
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBe(2);
|
||||
expect(result.get(Resource.DOCKER)).toEqual([AuthAction.READ_ANY]);
|
||||
expect(result.get(Resource.VMS)).toEqual([AuthAction.UPDATE_ANY]);
|
||||
});
|
||||
|
||||
it('should not duplicate actions for the same resource', async () => {
|
||||
const mockCasbinPermissions = [
|
||||
['ADMIN', 'DOCKER', 'READ'],
|
||||
['ADMIN', 'DOCKER', 'READ'],
|
||||
['ADMIN', 'DOCKER', 'UPDATE'],
|
||||
['ADMIN', 'DOCKER', 'UPDATE'],
|
||||
];
|
||||
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockResolvedValue(
|
||||
mockCasbinPermissions
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBe(1);
|
||||
expect(result.get(Resource.DOCKER)).toEqual([AuthAction.READ_ANY, AuthAction.UPDATE_ANY]);
|
||||
});
|
||||
|
||||
it('should handle errors gracefully', async () => {
|
||||
vi.spyOn(authzService, 'getImplicitPermissionsForUser').mockRejectedValue(
|
||||
new Error('Casbin error')
|
||||
);
|
||||
|
||||
const result = await authService.getImplicitPermissionsForRole(Role.ADMIN);
|
||||
|
||||
expect(result).toBeInstanceOf(Map);
|
||||
expect(result.size).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,11 +1,19 @@
|
||||
import { Injectable, Logger, UnauthorizedException } from '@nestjs/common';
|
||||
import { timingSafeEqual } from 'node:crypto';
|
||||
|
||||
import { Role } from '@unraid/shared/graphql.model.js';
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import {
|
||||
convertPermissionSetsToArrays,
|
||||
expandWildcardAction,
|
||||
parseActionToAuthAction,
|
||||
reconcileWildcardPermissions,
|
||||
} from '@unraid/shared/util/permissions.js';
|
||||
import { AuthZService } from 'nest-authz';
|
||||
|
||||
import { getters } from '@app/store/index.js';
|
||||
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
|
||||
import { CookieService } from '@app/unraid-api/auth/cookie.service.js';
|
||||
import { LocalSessionService } from '@app/unraid-api/auth/local-session.service.js';
|
||||
import { Permission } from '@app/unraid-api/graph/resolvers/api-key/api-key.model.js';
|
||||
import { UserAccount } from '@app/unraid-api/graph/user/user.model.js';
|
||||
import { FastifyRequest } from '@app/unraid-api/types/fastify.js';
|
||||
@@ -18,6 +26,7 @@ export class AuthService {
|
||||
constructor(
|
||||
private cookieService: CookieService,
|
||||
private apiKeyService: ApiKeyService,
|
||||
private localSessionService: LocalSessionService,
|
||||
private authzService: AuthZService
|
||||
) {}
|
||||
|
||||
@@ -83,6 +92,30 @@ export class AuthService {
|
||||
}
|
||||
}
|
||||
|
||||
async validateLocalSession(localSessionToken: string): Promise<UserAccount> {
|
||||
try {
|
||||
const isValid = await this.localSessionService.validateLocalSession(localSessionToken);
|
||||
|
||||
if (!isValid) {
|
||||
throw new UnauthorizedException('Invalid local session token');
|
||||
}
|
||||
|
||||
// Local session has admin privileges
|
||||
const user = await this.getLocalSessionUser();
|
||||
|
||||
// Sync the user's roles before checking them
|
||||
await this.syncUserRoles(user.id, user.roles);
|
||||
|
||||
// Now get the updated roles
|
||||
const existingRoles = await this.authzService.getRolesForUser(user.id);
|
||||
this.logger.debug(`Local session user ${user.id} has roles: ${existingRoles}`);
|
||||
|
||||
return user;
|
||||
} catch (error: unknown) {
|
||||
handleAuthError(this.logger, 'Failed to validate local session', error);
|
||||
}
|
||||
}
|
||||
|
||||
public async syncApiKeyRoles(apiKeyId: string, roles: string[]): Promise<void> {
|
||||
try {
|
||||
// Get existing roles and convert to Set
|
||||
@@ -111,14 +144,38 @@ export class AuthService {
|
||||
await this.authzService.deletePermissionsForUser(apiKeyId);
|
||||
|
||||
// Create array of permission-action pairs for processing
|
||||
const permissionActions = permissions.flatMap((permission) =>
|
||||
(permission.actions || []).map((action) => ({
|
||||
resource: permission.resource,
|
||||
action,
|
||||
}))
|
||||
);
|
||||
// Filter out any permissions with empty or undefined resources
|
||||
const permissionActions = permissions
|
||||
.filter((permission) => permission.resource && permission.resource.trim() !== '')
|
||||
.flatMap((permission) =>
|
||||
(permission.actions || [])
|
||||
.filter((action) => action && String(action).trim() !== '')
|
||||
.flatMap((action) => {
|
||||
const actionStr = String(action);
|
||||
// Handle wildcard - expand to all CRUD actions
|
||||
if (actionStr === '*' || actionStr.toLowerCase() === '*') {
|
||||
return expandWildcardAction().map((expandedAction) => ({
|
||||
resource: permission.resource,
|
||||
action: expandedAction,
|
||||
}));
|
||||
}
|
||||
|
||||
const { errors, errorOccured } = await batchProcess(
|
||||
// Use the shared helper to parse and validate the action
|
||||
const parsedAction = parseActionToAuthAction(actionStr);
|
||||
|
||||
// Only include valid AuthAction values
|
||||
return parsedAction
|
||||
? [
|
||||
{
|
||||
resource: permission.resource,
|
||||
action: parsedAction,
|
||||
},
|
||||
]
|
||||
: [];
|
||||
})
|
||||
);
|
||||
|
||||
const { errors, errorOccurred: errorOccured } = await batchProcess(
|
||||
permissionActions,
|
||||
({ resource, action }) =>
|
||||
this.authzService.addPermissionForUser(apiKeyId, resource, action)
|
||||
@@ -144,15 +201,12 @@ export class AuthService {
|
||||
}
|
||||
|
||||
try {
|
||||
if (!apiKey.roles) {
|
||||
apiKey.roles = [];
|
||||
}
|
||||
if (!apiKey.roles.includes(role)) {
|
||||
const apiKeyWithSecret = await this.apiKeyService.findByIdWithSecret(apiKeyId);
|
||||
|
||||
if (!apiKeyWithSecret) {
|
||||
throw new UnauthorizedException('API key not found with secret');
|
||||
}
|
||||
|
||||
apiKeyWithSecret.roles.push(role);
|
||||
await this.apiKeyService.saveApiKey(apiKeyWithSecret);
|
||||
apiKey.roles.push(role);
|
||||
await this.apiKeyService.saveApiKey(apiKey);
|
||||
await this.authzService.addRoleForUser(apiKeyId, role);
|
||||
}
|
||||
|
||||
@@ -174,14 +228,11 @@ export class AuthService {
|
||||
}
|
||||
|
||||
try {
|
||||
const apiKeyWithSecret = await this.apiKeyService.findByIdWithSecret(apiKeyId);
|
||||
|
||||
if (!apiKeyWithSecret) {
|
||||
throw new UnauthorizedException('API key not found with secret');
|
||||
if (!apiKey.roles) {
|
||||
apiKey.roles = [];
|
||||
}
|
||||
|
||||
apiKeyWithSecret.roles = apiKeyWithSecret.roles.filter((r) => r !== role);
|
||||
await this.apiKeyService.saveApiKey(apiKeyWithSecret);
|
||||
apiKey.roles = apiKey.roles.filter((r) => r !== role);
|
||||
await this.apiKeyService.saveApiKey(apiKey);
|
||||
await this.authzService.deleteRoleForUser(apiKeyId, role);
|
||||
|
||||
return true;
|
||||
@@ -224,7 +275,67 @@ export class AuthService {
|
||||
}
|
||||
|
||||
public validateCsrfToken(token?: string): boolean {
|
||||
return Boolean(token) && token === getters.emhttp().var.csrfToken;
|
||||
if (!token) return false;
|
||||
const csrfToken = getters.emhttp().var.csrfToken;
|
||||
if (!csrfToken) return false;
|
||||
return timingSafeEqual(Buffer.from(token, 'utf-8'), Buffer.from(csrfToken, 'utf-8'));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get implicit permissions for a role (including inherited permissions)
|
||||
*/
|
||||
public async getImplicitPermissionsForRole(role: Role): Promise<Map<Resource, AuthAction[]>> {
|
||||
// Use Set internally for efficient deduplication, with '*' as a special key for wildcards
|
||||
const permissionsWithSets = new Map<Resource | '*', Set<AuthAction>>();
|
||||
|
||||
// Load permissions from Casbin, defaulting to empty array on error
|
||||
let casbinPermissions: string[][] = [];
|
||||
try {
|
||||
casbinPermissions = await this.authzService.getImplicitPermissionsForUser(role);
|
||||
} catch (error) {
|
||||
this.logger.error(`Failed to get permissions for role ${role}:`, error);
|
||||
}
|
||||
|
||||
// Parse the Casbin permissions format: [["role", "resource", "action"], ...]
|
||||
for (const perm of casbinPermissions) {
|
||||
if (perm.length < 3) continue;
|
||||
|
||||
const resourceStr = perm[1];
|
||||
const action = perm[2];
|
||||
|
||||
if (!resourceStr) continue;
|
||||
|
||||
// Skip invalid resources (except wildcard)
|
||||
if (resourceStr !== '*' && !Object.values(Resource).includes(resourceStr as Resource)) {
|
||||
this.logger.debug(`Skipping invalid resource from Casbin: ${resourceStr}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Initialize Set if needed
|
||||
if (!permissionsWithSets.has(resourceStr as Resource | '*')) {
|
||||
permissionsWithSets.set(resourceStr as Resource | '*', new Set());
|
||||
}
|
||||
|
||||
const actionsSet = permissionsWithSets.get(resourceStr as Resource | '*')!;
|
||||
|
||||
// Handle wildcard or parse to valid AuthAction
|
||||
if (action === '*') {
|
||||
// Expand wildcard action to CRUD operations
|
||||
expandWildcardAction().forEach((a) => actionsSet.add(a));
|
||||
} else {
|
||||
// Use shared helper to parse and validate action
|
||||
const parsedAction = parseActionToAuthAction(action);
|
||||
if (parsedAction) {
|
||||
actionsSet.add(parsedAction);
|
||||
} else {
|
||||
this.logger.debug(`Skipping invalid action from Casbin: ${action}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Reconcile wildcard permissions and convert to final format
|
||||
reconcileWildcardPermissions(permissionsWithSets);
|
||||
return convertPermissionSetsToArrays(permissionsWithSets);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -234,7 +345,7 @@ export class AuthService {
|
||||
* @returns a service account that represents the user session (i.e. a webgui user).
|
||||
*/
|
||||
async getSessionUser(): Promise<UserAccount> {
|
||||
this.logger.debug('getSessionUser called!');
|
||||
this.logger.verbose('getSessionUser called!');
|
||||
return {
|
||||
id: '-1',
|
||||
description: 'Session receives administrator permissions',
|
||||
@@ -243,4 +354,21 @@ export class AuthService {
|
||||
permissions: [],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a user object representing a local session.
|
||||
* Note: Does NOT perform validation.
|
||||
*
|
||||
* @returns a service account that represents the local session user (i.e. CLI/system operations).
|
||||
*/
|
||||
async getLocalSessionUser(): Promise<UserAccount> {
|
||||
this.logger.verbose('getLocalSessionUser called!');
|
||||
return {
|
||||
id: '-2',
|
||||
description: 'Local session receives administrator permissions for CLI/system operations',
|
||||
name: 'local-admin',
|
||||
roles: [Role.ADMIN],
|
||||
permissions: [],
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,6 +13,7 @@ import type { FastifyRequest } from '@app/unraid-api/types/fastify.js';
|
||||
import { apiLogger } from '@app/core/log.js';
|
||||
import { UserCookieStrategy } from '@app/unraid-api/auth/cookie.strategy.js';
|
||||
import { ServerHeaderStrategy } from '@app/unraid-api/auth/header.strategy.js';
|
||||
import { LocalSessionStrategy } from '@app/unraid-api/auth/local-session.strategy.js';
|
||||
import { IS_PUBLIC_ENDPOINT_KEY } from '@app/unraid-api/auth/public.decorator.js';
|
||||
|
||||
/**
|
||||
@@ -37,7 +38,7 @@ type GraphQLContext =
|
||||
|
||||
@Injectable()
|
||||
export class AuthenticationGuard
|
||||
extends AuthGuard([ServerHeaderStrategy.key, UserCookieStrategy.key])
|
||||
extends AuthGuard([ServerHeaderStrategy.key, LocalSessionStrategy.key, UserCookieStrategy.key])
|
||||
implements CanActivate
|
||||
{
|
||||
protected logger = new Logger(AuthenticationGuard.name);
|
||||
|
||||
@@ -12,7 +12,7 @@ g = _, _
|
||||
e = some(where (p.eft == allow))
|
||||
|
||||
[matchers]
|
||||
m = (regexMatch(r.sub, p.sub) || g(r.sub, p.sub)) && \
|
||||
regexMatch(lower(r.obj), lower(p.obj)) && \
|
||||
(regexMatch(lower(r.act), lower(p.act)) || p.act == '*' || regexMatch(lower(r.act), lower(concat(p.act, ':.*'))))
|
||||
m = (r.sub == p.sub || g(r.sub, p.sub)) && \
|
||||
(r.obj == p.obj || p.obj == '*') && \
|
||||
(r.act == p.act || p.act == '*')
|
||||
`;
|
||||
|
||||
566
api/src/unraid-api/auth/casbin/permissions-comprehensive.spec.ts
Normal file
566
api/src/unraid-api/auth/casbin/permissions-comprehensive.spec.ts
Normal file
@@ -0,0 +1,566 @@
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { Model as CasbinModel, newEnforcer, StringAdapter } from 'casbin';
|
||||
import { beforeEach, describe, expect, it } from 'vitest';
|
||||
|
||||
import { CASBIN_MODEL } from '@app/unraid-api/auth/casbin/model.js';
|
||||
import { BASE_POLICY } from '@app/unraid-api/auth/casbin/policy.js';
|
||||
|
||||
describe('Comprehensive Casbin Permissions Tests', () => {
|
||||
describe('All UsePermissions decorator combinations', () => {
|
||||
// Test all resource/action combinations used in the codebase
|
||||
const testCases = [
|
||||
// API_KEY permissions
|
||||
{
|
||||
resource: Resource.API_KEY,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST, Role.CONNECT],
|
||||
},
|
||||
{
|
||||
resource: Resource.API_KEY,
|
||||
action: AuthAction.CREATE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST, Role.CONNECT],
|
||||
},
|
||||
{
|
||||
resource: Resource.API_KEY,
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST, Role.CONNECT],
|
||||
},
|
||||
{
|
||||
resource: Resource.API_KEY,
|
||||
action: AuthAction.DELETE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST, Role.CONNECT],
|
||||
},
|
||||
|
||||
// PERMISSION resource (for listing possible permissions)
|
||||
{
|
||||
resource: Resource.PERMISSION,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
|
||||
// ARRAY permissions
|
||||
{
|
||||
resource: Resource.ARRAY,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.ARRAY,
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST],
|
||||
},
|
||||
|
||||
// CONFIG permissions
|
||||
{
|
||||
resource: Resource.CONFIG,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.CONFIG,
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST, Role.CONNECT],
|
||||
},
|
||||
|
||||
// DOCKER permissions
|
||||
{
|
||||
resource: Resource.DOCKER,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.DOCKER,
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST],
|
||||
},
|
||||
|
||||
// VMS permissions
|
||||
{
|
||||
resource: Resource.VMS,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.VMS,
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST],
|
||||
},
|
||||
|
||||
// FLASH permissions (includes rclone operations)
|
||||
{
|
||||
resource: Resource.FLASH,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.FLASH,
|
||||
action: AuthAction.CREATE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST, Role.CONNECT],
|
||||
},
|
||||
{
|
||||
resource: Resource.FLASH,
|
||||
action: AuthAction.DELETE_ANY,
|
||||
allowedRoles: [Role.ADMIN],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST, Role.CONNECT],
|
||||
},
|
||||
|
||||
// INFO permissions (system information)
|
||||
{
|
||||
resource: Resource.INFO,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
|
||||
// LOGS permissions
|
||||
{
|
||||
resource: Resource.LOGS,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
|
||||
// ME permissions (current user info)
|
||||
{
|
||||
resource: Resource.ME,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT, Role.GUEST],
|
||||
deniedRoles: [],
|
||||
},
|
||||
|
||||
// NOTIFICATIONS permissions
|
||||
{
|
||||
resource: Resource.NOTIFICATIONS,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
|
||||
// Other read-only resources for VIEWER
|
||||
{
|
||||
resource: Resource.DISK,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.DISPLAY,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.ONLINE,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.OWNER,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.REGISTRATION,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.SERVERS,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.SERVICES,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.SHARE,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.VARS,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.CUSTOMIZATIONS,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.ACTIVATION_CODE,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
|
||||
// CONNECT special permission for remote access
|
||||
{
|
||||
resource: Resource.CONNECT__REMOTE_ACCESS,
|
||||
action: AuthAction.READ_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.VIEWER, Role.CONNECT],
|
||||
deniedRoles: [Role.GUEST],
|
||||
},
|
||||
{
|
||||
resource: Resource.CONNECT__REMOTE_ACCESS,
|
||||
action: AuthAction.UPDATE_ANY,
|
||||
allowedRoles: [Role.ADMIN, Role.CONNECT],
|
||||
deniedRoles: [Role.VIEWER, Role.GUEST],
|
||||
},
|
||||
];
|
||||
|
||||
testCases.forEach(({ resource, action, allowedRoles, deniedRoles }) => {
|
||||
describe(`${resource} - ${action}`, () => {
|
||||
let enforcer: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
enforcer = await newEnforcer(model, adapter);
|
||||
});
|
||||
|
||||
allowedRoles.forEach((role) => {
|
||||
it(`should allow ${role} to ${action} ${resource}`, async () => {
|
||||
const result = await enforcer.enforce(role, resource, action);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
deniedRoles.forEach((role) => {
|
||||
it(`should deny ${role} to ${action} ${resource}`, async () => {
|
||||
const result = await enforcer.enforce(role, resource, action);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Action matching and normalization', () => {
|
||||
let enforcer: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
enforcer = await newEnforcer(model, adapter);
|
||||
});
|
||||
|
||||
it('should match actions exactly as stored (uppercase)', async () => {
|
||||
// Our policies store actions as uppercase (e.g., 'READ_ANY')
|
||||
// The matcher now requires exact matching for security
|
||||
|
||||
// Uppercase actions should work
|
||||
const adminUpperResult = await enforcer.enforce(
|
||||
Role.ADMIN,
|
||||
Resource.DOCKER,
|
||||
AuthAction.READ_ANY
|
||||
);
|
||||
expect(adminUpperResult).toBe(true);
|
||||
|
||||
const viewerUpperResult = await enforcer.enforce(
|
||||
Role.VIEWER,
|
||||
Resource.DOCKER,
|
||||
AuthAction.READ_ANY
|
||||
);
|
||||
expect(viewerUpperResult).toBe(true);
|
||||
|
||||
// For non-wildcard roles, lowercase actions won't match
|
||||
const viewerLowerResult = await enforcer.enforce(Role.VIEWER, Resource.DOCKER, 'read:any');
|
||||
expect(viewerLowerResult).toBe(false);
|
||||
|
||||
// Mixed case won't match for VIEWER either
|
||||
const viewerMixedResult = await enforcer.enforce(Role.VIEWER, Resource.DOCKER, 'Read_Any');
|
||||
expect(viewerMixedResult).toBe(false);
|
||||
|
||||
// GUEST also requires exact lowercase
|
||||
const guestUpperResult = await enforcer.enforce(Role.GUEST, Resource.ME, 'READ:ANY');
|
||||
expect(guestUpperResult).toBe(false);
|
||||
|
||||
const guestLowerResult = await enforcer.enforce(
|
||||
Role.GUEST,
|
||||
Resource.ME,
|
||||
AuthAction.READ_ANY
|
||||
);
|
||||
expect(guestLowerResult).toBe(true);
|
||||
});
|
||||
|
||||
it('should allow wildcard actions for ADMIN regardless of case', async () => {
|
||||
// ADMIN has wildcard permissions (*, *, *) which match any action
|
||||
const adminWildcardActions = [
|
||||
'read:any',
|
||||
'create:any',
|
||||
'update:any',
|
||||
'delete:any',
|
||||
'READ:ANY', // Even uppercase works due to wildcard
|
||||
'ANYTHING', // Any action works due to wildcard
|
||||
];
|
||||
|
||||
for (const action of adminWildcardActions) {
|
||||
const result = await enforcer.enforce(Role.ADMIN, Resource.DOCKER, action);
|
||||
expect(result).toBe(true);
|
||||
}
|
||||
});
|
||||
|
||||
it('should NOT match different actions even with correct case', async () => {
|
||||
// VIEWER should not be able to UPDATE even with correct lowercase
|
||||
const result = await enforcer.enforce(Role.VIEWER, Resource.DOCKER, AuthAction.UPDATE_ANY);
|
||||
expect(result).toBe(false);
|
||||
|
||||
// VIEWER should not be able to DELETE
|
||||
const deleteResult = await enforcer.enforce(
|
||||
Role.VIEWER,
|
||||
Resource.DOCKER,
|
||||
AuthAction.DELETE_ANY
|
||||
);
|
||||
expect(deleteResult).toBe(false);
|
||||
|
||||
// VIEWER should not be able to CREATE
|
||||
const createResult = await enforcer.enforce(
|
||||
Role.VIEWER,
|
||||
Resource.DOCKER,
|
||||
AuthAction.CREATE_ANY
|
||||
);
|
||||
expect(createResult).toBe(false);
|
||||
});
|
||||
|
||||
it('should ensure actions are normalized when stored', async () => {
|
||||
// This test documents that our auth service normalizes actions to uppercase
|
||||
// when syncing permissions, ensuring consistency
|
||||
|
||||
// The BASE_POLICY uses AuthAction.READ_ANY which is 'READ_ANY' (uppercase)
|
||||
expect(BASE_POLICY).toContain('READ_ANY');
|
||||
expect(BASE_POLICY).not.toContain('read:any');
|
||||
|
||||
// All our stored policies should be uppercase
|
||||
const policies = await enforcer.getPolicy();
|
||||
for (const policy of policies) {
|
||||
const action = policy[2]; // Third element is the action
|
||||
if (action && action !== '*') {
|
||||
// All non-wildcard actions should be uppercase
|
||||
expect(action).toBe(action.toUpperCase());
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Wildcard permissions', () => {
|
||||
let enforcer: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
enforcer = await newEnforcer(model, adapter);
|
||||
});
|
||||
|
||||
it('should allow ADMIN wildcard access to all resources and actions', async () => {
|
||||
const resources = Object.values(Resource);
|
||||
const actions = [
|
||||
AuthAction.READ_ANY,
|
||||
AuthAction.CREATE_ANY,
|
||||
AuthAction.UPDATE_ANY,
|
||||
AuthAction.DELETE_ANY,
|
||||
];
|
||||
|
||||
for (const resource of resources) {
|
||||
for (const action of actions) {
|
||||
const result = await enforcer.enforce(Role.ADMIN, resource, action);
|
||||
expect(result).toBe(true);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
it('should allow CONNECT read access to most resources but NOT API_KEY', async () => {
|
||||
const resources = Object.values(Resource).filter(
|
||||
(r) => r !== Resource.CONNECT__REMOTE_ACCESS && r !== Resource.API_KEY
|
||||
);
|
||||
|
||||
for (const resource of resources) {
|
||||
// Should be able to read most resources
|
||||
const readResult = await enforcer.enforce(Role.CONNECT, resource, AuthAction.READ_ANY);
|
||||
expect(readResult).toBe(true);
|
||||
|
||||
// Should NOT be able to write (except CONNECT__REMOTE_ACCESS)
|
||||
const updateResult = await enforcer.enforce(
|
||||
Role.CONNECT,
|
||||
resource,
|
||||
AuthAction.UPDATE_ANY
|
||||
);
|
||||
expect(updateResult).toBe(false);
|
||||
}
|
||||
|
||||
// CONNECT should NOT be able to read API_KEY
|
||||
const apiKeyRead = await enforcer.enforce(
|
||||
Role.CONNECT,
|
||||
Resource.API_KEY,
|
||||
AuthAction.READ_ANY
|
||||
);
|
||||
expect(apiKeyRead).toBe(false);
|
||||
|
||||
// CONNECT should NOT be able to perform any action on API_KEY
|
||||
const apiKeyCreate = await enforcer.enforce(
|
||||
Role.CONNECT,
|
||||
Resource.API_KEY,
|
||||
AuthAction.CREATE_ANY
|
||||
);
|
||||
expect(apiKeyCreate).toBe(false);
|
||||
const apiKeyUpdate = await enforcer.enforce(
|
||||
Role.CONNECT,
|
||||
Resource.API_KEY,
|
||||
AuthAction.UPDATE_ANY
|
||||
);
|
||||
expect(apiKeyUpdate).toBe(false);
|
||||
const apiKeyDelete = await enforcer.enforce(
|
||||
Role.CONNECT,
|
||||
Resource.API_KEY,
|
||||
AuthAction.DELETE_ANY
|
||||
);
|
||||
expect(apiKeyDelete).toBe(false);
|
||||
|
||||
// Special case: CONNECT can update CONNECT__REMOTE_ACCESS
|
||||
const remoteAccessUpdate = await enforcer.enforce(
|
||||
Role.CONNECT,
|
||||
Resource.CONNECT__REMOTE_ACCESS,
|
||||
AuthAction.UPDATE_ANY
|
||||
);
|
||||
expect(remoteAccessUpdate).toBe(true);
|
||||
});
|
||||
|
||||
it('should explicitly deny CONNECT role from accessing API_KEY to prevent secret exposure', async () => {
|
||||
// CONNECT should NOT be able to read API_KEY (which would expose secrets)
|
||||
const apiKeyRead = await enforcer.enforce(
|
||||
Role.CONNECT,
|
||||
Resource.API_KEY,
|
||||
AuthAction.READ_ANY
|
||||
);
|
||||
expect(apiKeyRead).toBe(false);
|
||||
|
||||
// Verify all API_KEY operations are denied for CONNECT
|
||||
const actions = ['create:any', 'read:any', 'update:any', 'delete:any'];
|
||||
for (const action of actions) {
|
||||
const result = await enforcer.enforce(Role.CONNECT, Resource.API_KEY, action);
|
||||
expect(result).toBe(false);
|
||||
}
|
||||
|
||||
// Verify ADMIN can still access API_KEY
|
||||
const adminApiKeyRead = await enforcer.enforce(
|
||||
Role.ADMIN,
|
||||
Resource.API_KEY,
|
||||
AuthAction.READ_ANY
|
||||
);
|
||||
expect(adminApiKeyRead).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Role inheritance', () => {
|
||||
let enforcer: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
enforcer = await newEnforcer(model, adapter);
|
||||
});
|
||||
|
||||
it('should inherit GUEST permissions for VIEWER', async () => {
|
||||
// VIEWER inherits from GUEST, so should have ME access
|
||||
const result = await enforcer.enforce(Role.VIEWER, Resource.ME, AuthAction.READ_ANY);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should inherit GUEST permissions for CONNECT', async () => {
|
||||
// CONNECT inherits from GUEST, so should have ME access
|
||||
const result = await enforcer.enforce(Role.CONNECT, Resource.ME, AuthAction.READ_ANY);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should inherit GUEST permissions for ADMIN', async () => {
|
||||
// ADMIN inherits from GUEST, so should have ME access
|
||||
const result = await enforcer.enforce(Role.ADMIN, Resource.ME, AuthAction.READ_ANY);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge cases and security', () => {
|
||||
it('should deny access with empty action', async () => {
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
const result = await enforcer.enforce(Role.VIEWER, Resource.DOCKER, '');
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should deny access with empty resource', async () => {
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
const result = await enforcer.enforce(Role.VIEWER, '', AuthAction.READ_ANY);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should deny access with undefined role', async () => {
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
const result = await enforcer.enforce(
|
||||
'UNDEFINED_ROLE',
|
||||
Resource.DOCKER,
|
||||
AuthAction.READ_ANY
|
||||
);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should deny access with malformed action', async () => {
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
const malformedActions = [
|
||||
'read', // Missing possession
|
||||
':any', // Missing verb
|
||||
'read:', // Empty possession
|
||||
'read:own', // Different possession format
|
||||
'READ', // Uppercase without possession
|
||||
];
|
||||
|
||||
for (const action of malformedActions) {
|
||||
const result = await enforcer.enforce(Role.VIEWER, Resource.DOCKER, action);
|
||||
expect(result).toBe(false);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
147
api/src/unraid-api/auth/casbin/policy.spec.ts
Normal file
147
api/src/unraid-api/auth/casbin/policy.spec.ts
Normal file
@@ -0,0 +1,147 @@
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { Model as CasbinModel, newEnforcer, StringAdapter } from 'casbin';
|
||||
import { describe, expect, it } from 'vitest';
|
||||
|
||||
import { CASBIN_MODEL } from '@app/unraid-api/auth/casbin/model.js';
|
||||
import { BASE_POLICY } from '@app/unraid-api/auth/casbin/policy.js';
|
||||
|
||||
describe('Casbin Policy - VIEWER role restrictions', () => {
|
||||
it('should validate matcher does not allow empty policies', async () => {
|
||||
// Test that empty policies don't match everything
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
|
||||
// Test with a policy that has an empty object
|
||||
const emptyPolicy = `p, VIEWER, , ${AuthAction.READ_ANY}`;
|
||||
const adapter = new StringAdapter(emptyPolicy);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
// Empty policy should not match a real resource
|
||||
const canReadApiKey = await enforcer.enforce(Role.VIEWER, Resource.API_KEY, AuthAction.READ_ANY);
|
||||
expect(canReadApiKey).toBe(false);
|
||||
});
|
||||
|
||||
it('should deny VIEWER role access to API_KEY resource', async () => {
|
||||
// Create enforcer with actual policy
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
// Test that VIEWER cannot access API_KEY with any action
|
||||
const canReadApiKey = await enforcer.enforce(Role.VIEWER, Resource.API_KEY, AuthAction.READ_ANY);
|
||||
const canCreateApiKey = await enforcer.enforce(
|
||||
Role.VIEWER,
|
||||
Resource.API_KEY,
|
||||
AuthAction.CREATE_ANY
|
||||
);
|
||||
const canUpdateApiKey = await enforcer.enforce(
|
||||
Role.VIEWER,
|
||||
Resource.API_KEY,
|
||||
AuthAction.UPDATE_ANY
|
||||
);
|
||||
const canDeleteApiKey = await enforcer.enforce(
|
||||
Role.VIEWER,
|
||||
Resource.API_KEY,
|
||||
AuthAction.DELETE_ANY
|
||||
);
|
||||
|
||||
expect(canReadApiKey).toBe(false);
|
||||
expect(canCreateApiKey).toBe(false);
|
||||
expect(canUpdateApiKey).toBe(false);
|
||||
expect(canDeleteApiKey).toBe(false);
|
||||
});
|
||||
|
||||
it('should allow VIEWER role access to other resources', async () => {
|
||||
// Create enforcer with actual policy
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
// Test that VIEWER can read other resources
|
||||
const canReadDocker = await enforcer.enforce(Role.VIEWER, Resource.DOCKER, AuthAction.READ_ANY);
|
||||
const canReadArray = await enforcer.enforce(Role.VIEWER, Resource.ARRAY, AuthAction.READ_ANY);
|
||||
const canReadConfig = await enforcer.enforce(Role.VIEWER, Resource.CONFIG, AuthAction.READ_ANY);
|
||||
const canReadVms = await enforcer.enforce(Role.VIEWER, Resource.VMS, AuthAction.READ_ANY);
|
||||
|
||||
expect(canReadDocker).toBe(true);
|
||||
expect(canReadArray).toBe(true);
|
||||
expect(canReadConfig).toBe(true);
|
||||
expect(canReadVms).toBe(true);
|
||||
|
||||
// But VIEWER cannot write to these resources
|
||||
const canUpdateDocker = await enforcer.enforce(
|
||||
Role.VIEWER,
|
||||
Resource.DOCKER,
|
||||
AuthAction.UPDATE_ANY
|
||||
);
|
||||
const canDeleteArray = await enforcer.enforce(
|
||||
Role.VIEWER,
|
||||
Resource.ARRAY,
|
||||
AuthAction.DELETE_ANY
|
||||
);
|
||||
|
||||
expect(canUpdateDocker).toBe(false);
|
||||
expect(canDeleteArray).toBe(false);
|
||||
});
|
||||
|
||||
it('should allow ADMIN role full access to API_KEY resource', async () => {
|
||||
// Create enforcer with actual policy
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
// Test that ADMIN can access API_KEY with all actions
|
||||
const canReadApiKey = await enforcer.enforce(Role.ADMIN, Resource.API_KEY, AuthAction.READ_ANY);
|
||||
const canCreateApiKey = await enforcer.enforce(
|
||||
Role.ADMIN,
|
||||
Resource.API_KEY,
|
||||
AuthAction.CREATE_ANY
|
||||
);
|
||||
const canUpdateApiKey = await enforcer.enforce(
|
||||
Role.ADMIN,
|
||||
Resource.API_KEY,
|
||||
AuthAction.UPDATE_ANY
|
||||
);
|
||||
const canDeleteApiKey = await enforcer.enforce(
|
||||
Role.ADMIN,
|
||||
Resource.API_KEY,
|
||||
AuthAction.DELETE_ANY
|
||||
);
|
||||
|
||||
expect(canReadApiKey).toBe(true);
|
||||
expect(canCreateApiKey).toBe(true);
|
||||
expect(canUpdateApiKey).toBe(true);
|
||||
expect(canDeleteApiKey).toBe(true);
|
||||
});
|
||||
|
||||
it('should ensure VIEWER permissions exclude API_KEY in generated policy', () => {
|
||||
// Verify that the generated policy string doesn't contain VIEWER + API_KEY combination
|
||||
expect(BASE_POLICY).toContain(`p, ${Role.VIEWER}, ${Resource.DOCKER}, ${AuthAction.READ_ANY}`);
|
||||
expect(BASE_POLICY).toContain(`p, ${Role.VIEWER}, ${Resource.ARRAY}, ${AuthAction.READ_ANY}`);
|
||||
expect(BASE_POLICY).not.toContain(
|
||||
`p, ${Role.VIEWER}, ${Resource.API_KEY}, ${AuthAction.READ_ANY}`
|
||||
);
|
||||
|
||||
// Count VIEWER permissions - should be total resources minus API_KEY
|
||||
const viewerPermissionLines = BASE_POLICY.split('\n').filter((line) =>
|
||||
line.startsWith(`p, ${Role.VIEWER},`)
|
||||
);
|
||||
const totalResources = Object.values(Resource).length;
|
||||
expect(viewerPermissionLines.length).toBe(totalResources - 1); // All resources except API_KEY
|
||||
});
|
||||
|
||||
it('should inherit GUEST permissions for VIEWER role', async () => {
|
||||
// Create enforcer with actual policy
|
||||
const model = new CasbinModel();
|
||||
model.loadModelFromText(CASBIN_MODEL);
|
||||
const adapter = new StringAdapter(BASE_POLICY);
|
||||
const enforcer = await newEnforcer(model, adapter);
|
||||
|
||||
// VIEWER inherits from GUEST, so should have access to ME resource
|
||||
const canReadMe = await enforcer.enforce(Role.VIEWER, Resource.ME, AuthAction.READ_ANY);
|
||||
expect(canReadMe).toBe(true);
|
||||
});
|
||||
});
|
||||
@@ -1,18 +1,26 @@
|
||||
import { Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { AuthAction } from 'nest-authz';
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
|
||||
// Generate VIEWER permissions for all resources except API_KEY
|
||||
const viewerPermissions = Object.values(Resource)
|
||||
.filter((resource) => resource !== Resource.API_KEY)
|
||||
.map((resource) => `p, ${Role.VIEWER}, ${resource}, ${AuthAction.READ_ANY}`)
|
||||
.join('\n');
|
||||
|
||||
export const BASE_POLICY = `
|
||||
# Admin permissions
|
||||
# Admin permissions - full access
|
||||
p, ${Role.ADMIN}, *, *
|
||||
|
||||
# Connect Permissions
|
||||
p, ${Role.CONNECT}, *, ${AuthAction.READ_ANY}
|
||||
# Connect permissions - inherits from VIEWER plus can manage remote access
|
||||
p, ${Role.CONNECT}, ${Resource.CONNECT__REMOTE_ACCESS}, ${AuthAction.UPDATE_ANY}
|
||||
|
||||
# Guest permissions
|
||||
# Guest permissions - basic profile access
|
||||
p, ${Role.GUEST}, ${Resource.ME}, ${AuthAction.READ_ANY}
|
||||
|
||||
# Viewer permissions - read-only access to all resources except API_KEY
|
||||
${viewerPermissions}
|
||||
|
||||
# Role inheritance
|
||||
g, ${Role.ADMIN}, ${Role.GUEST}
|
||||
g, ${Role.CONNECT}, ${Role.GUEST}
|
||||
g, ${Role.CONNECT}, ${Role.VIEWER}
|
||||
g, ${Role.VIEWER}, ${Role.GUEST}
|
||||
`;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Inject, Injectable, Logger } from '@nestjs/common';
|
||||
import { readFile } from 'fs/promises';
|
||||
import { readdir, readFile } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
|
||||
import { fileExists } from '@app/core/utils/files/file-exists.js';
|
||||
@@ -9,7 +9,7 @@ import { batchProcess } from '@app/utils.js';
|
||||
/** token for dependency injection of a session cookie options object */
|
||||
export const SESSION_COOKIE_CONFIG = 'SESSION_COOKIE_CONFIG';
|
||||
|
||||
type SessionCookieConfig = {
|
||||
export type SessionCookieConfig = {
|
||||
namePrefix: string;
|
||||
sessionDir: string;
|
||||
secure: boolean;
|
||||
@@ -68,13 +68,17 @@ export class CookieService {
|
||||
}
|
||||
try {
|
||||
const sessionData = await readFile(sessionFile, 'ascii');
|
||||
return sessionData.includes('unraid_login') && sessionData.includes('unraid_user');
|
||||
return this.isSessionValid(sessionData);
|
||||
} catch (e) {
|
||||
this.logger.error(e, 'Error reading session file');
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private isSessionValid(sessionData: string): boolean {
|
||||
return sessionData.includes('unraid_login') && sessionData.includes('unraid_user');
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a session id, returns the full path to the session file on disk.
|
||||
*
|
||||
@@ -91,4 +95,33 @@ export class CookieService {
|
||||
const sanitizedSessionId = sessionId.replace(/[^a-zA-Z0-9]/g, '');
|
||||
return join(this.opts.sessionDir, `sess_${sanitizedSessionId}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the active session id, if any.
|
||||
* @returns the active session id, if any, or null if no active session is found.
|
||||
*/
|
||||
async getActiveSession(): Promise<string | null> {
|
||||
let sessionFiles: string[] = [];
|
||||
try {
|
||||
sessionFiles = await readdir(this.opts.sessionDir);
|
||||
} catch (e) {
|
||||
this.logger.warn(e, 'Error reading session directory');
|
||||
return null;
|
||||
}
|
||||
for (const sessionFile of sessionFiles) {
|
||||
if (!sessionFile.startsWith('sess_')) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const sessionData = await readFile(join(this.opts.sessionDir, sessionFile), 'ascii');
|
||||
if (this.isSessionValid(sessionData)) {
|
||||
return sessionFile.replace('sess_', '');
|
||||
}
|
||||
} catch {
|
||||
// Ignore unreadable files and continue scanning
|
||||
continue;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
21
api/src/unraid-api/auth/local-session-lifecycle.service.ts
Normal file
21
api/src/unraid-api/auth/local-session-lifecycle.service.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import { Injectable, OnModuleInit } from '@nestjs/common';
|
||||
|
||||
import { LocalSessionService } from '@app/unraid-api/auth/local-session.service.js';
|
||||
|
||||
/**
|
||||
* Service for managing the lifecycle of the local session.
|
||||
*
|
||||
* Used for tying the local session's lifecycle to the API's life, rather
|
||||
* than the LocalSessionService's lifecycle, since it may also be used by
|
||||
* other applications, like the CLI.
|
||||
*
|
||||
* This service is only used in the API, and not in the CLI.
|
||||
*/
|
||||
@Injectable()
|
||||
export class LocalSessionLifecycleService implements OnModuleInit {
|
||||
constructor(private readonly localSessionService: LocalSessionService) {}
|
||||
|
||||
async onModuleInit() {
|
||||
await this.localSessionService.generateLocalSession();
|
||||
}
|
||||
}
|
||||
97
api/src/unraid-api/auth/local-session.service.ts
Normal file
97
api/src/unraid-api/auth/local-session.service.ts
Normal file
@@ -0,0 +1,97 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { randomBytes, timingSafeEqual } from 'crypto';
|
||||
import { chmod, mkdir, readFile, unlink, writeFile } from 'fs/promises';
|
||||
import { dirname } from 'path';
|
||||
|
||||
import { PATHS_LOCAL_SESSION_FILE } from '@app/environment.js';
|
||||
|
||||
/**
|
||||
* Service that manages a local session file for internal CLI/system authentication.
|
||||
* Creates a secure token on startup that can be used for local system operations.
|
||||
*/
|
||||
@Injectable()
|
||||
export class LocalSessionService {
|
||||
private readonly logger = new Logger(LocalSessionService.name);
|
||||
private sessionToken: string | null = null;
|
||||
private static readonly SESSION_FILE_PATH = PATHS_LOCAL_SESSION_FILE;
|
||||
|
||||
/**
|
||||
* Generate a secure local session token and write it to file
|
||||
*/
|
||||
async generateLocalSession(): Promise<void> {
|
||||
// Generate a cryptographically secure random token
|
||||
this.sessionToken = randomBytes(32).toString('hex');
|
||||
|
||||
try {
|
||||
// Ensure directory exists
|
||||
await mkdir(dirname(LocalSessionService.getSessionFilePath()), { recursive: true });
|
||||
|
||||
// Write token to file
|
||||
await writeFile(LocalSessionService.getSessionFilePath(), this.sessionToken, {
|
||||
encoding: 'utf-8',
|
||||
mode: 0o600, // Owner read/write only
|
||||
});
|
||||
|
||||
// Ensure proper permissions (redundant but explicit)
|
||||
// Check if file exists first to handle race conditions in test environments
|
||||
await chmod(LocalSessionService.getSessionFilePath(), 0o600).catch((error) => {
|
||||
this.logger.warn(error, 'Failed to set permissions on local session file');
|
||||
});
|
||||
|
||||
this.logger.debug(`Local session written to ${LocalSessionService.getSessionFilePath()}`);
|
||||
} catch (error) {
|
||||
this.logger.error(`Failed to write local session: ${error}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read and return the current local session token from file
|
||||
*/
|
||||
public async getLocalSession(): Promise<string | null> {
|
||||
try {
|
||||
return await readFile(LocalSessionService.getSessionFilePath(), 'utf-8');
|
||||
} catch (error) {
|
||||
this.logger.warn(error, 'Local session file not found or not readable');
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate if a given token matches the current local session
|
||||
*/
|
||||
public async validateLocalSession(token: string): Promise<boolean> {
|
||||
// Coerce inputs to strings (or empty string if undefined)
|
||||
const tokenStr = token || '';
|
||||
const currentToken = await this.getLocalSession();
|
||||
const currentTokenStr = currentToken || '';
|
||||
|
||||
// Early return if either is empty
|
||||
if (!tokenStr || !currentTokenStr) return false;
|
||||
|
||||
// Create buffers
|
||||
const tokenBuffer = Buffer.from(tokenStr, 'utf-8');
|
||||
const currentTokenBuffer = Buffer.from(currentTokenStr, 'utf-8');
|
||||
|
||||
// Check length equality first to prevent timingSafeEqual from throwing
|
||||
if (tokenBuffer.length !== currentTokenBuffer.length) return false;
|
||||
|
||||
// Use constant-time comparison to prevent timing attacks
|
||||
return timingSafeEqual(tokenBuffer, currentTokenBuffer);
|
||||
}
|
||||
|
||||
public async deleteLocalSession(): Promise<void> {
|
||||
try {
|
||||
await unlink(LocalSessionService.getSessionFilePath());
|
||||
} catch (error) {
|
||||
this.logger.error(error, 'Error deleting local session file');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the file path for the local session (useful for external readers)
|
||||
*/
|
||||
public static getSessionFilePath(): string {
|
||||
return LocalSessionService.SESSION_FILE_PATH;
|
||||
}
|
||||
}
|
||||
46
api/src/unraid-api/auth/local-session.strategy.ts
Normal file
46
api/src/unraid-api/auth/local-session.strategy.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { PassportStrategy } from '@nestjs/passport';
|
||||
|
||||
import { Strategy } from 'passport-custom';
|
||||
|
||||
import { AuthService } from '@app/unraid-api/auth/auth.service.js';
|
||||
import { UserAccount } from '@app/unraid-api/graph/user/user.model.js';
|
||||
import { FastifyRequest } from '@app/unraid-api/types/fastify.js';
|
||||
|
||||
/**
|
||||
* Passport strategy for local session authentication.
|
||||
* Validates the x-local-session header for internal CLI/system operations.
|
||||
*/
|
||||
@Injectable()
|
||||
export class LocalSessionStrategy extends PassportStrategy(Strategy, 'local-session') {
|
||||
static readonly key = 'local-session';
|
||||
private readonly logger = new Logger(LocalSessionStrategy.name);
|
||||
|
||||
constructor(private readonly authService: AuthService) {
|
||||
super();
|
||||
}
|
||||
|
||||
async validate(request: FastifyRequest): Promise<UserAccount | null> {
|
||||
try {
|
||||
const localSessionToken = request.headers['x-local-session'] as string;
|
||||
|
||||
if (!localSessionToken) {
|
||||
this.logger.verbose('No local session token found in request headers');
|
||||
return null;
|
||||
}
|
||||
|
||||
this.logger.verbose('Attempting to validate local session token');
|
||||
const user = await this.authService.validateLocalSession(localSessionToken);
|
||||
|
||||
if (user) {
|
||||
this.logger.verbose(`Local session authenticated user: ${user.name}`);
|
||||
return user;
|
||||
}
|
||||
|
||||
return null;
|
||||
} catch (error) {
|
||||
this.logger.verbose(error, `Local session validation failed`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,80 +0,0 @@
|
||||
import { Injectable, Logger, Optional } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
|
||||
import type { SsoUserService as ISsoUserService } from '@unraid/shared/services/sso.js';
|
||||
import { GraphQLError } from 'graphql/error/GraphQLError.js';
|
||||
|
||||
import type { ApiConfig } from '@app/unraid-api/config/api-config.module.js';
|
||||
import { UnraidFileModificationService } from '@app/unraid-api/unraid-file-modifier/unraid-file-modifier.service.js';
|
||||
|
||||
@Injectable()
|
||||
export class SsoUserService implements ISsoUserService {
|
||||
private readonly logger = new Logger(SsoUserService.name);
|
||||
private ssoSubIdsConfigKey = 'api.ssoSubIds';
|
||||
|
||||
constructor(
|
||||
private readonly configService: ConfigService,
|
||||
@Optional() private readonly fileModificationService?: UnraidFileModificationService
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Get the current list of SSO user IDs
|
||||
* @returns Array of SSO user IDs
|
||||
*/
|
||||
async getSsoUsers(): Promise<string[]> {
|
||||
const ssoSubIds = this.configService.getOrThrow<ApiConfig['ssoSubIds']>(this.ssoSubIdsConfigKey);
|
||||
return ssoSubIds;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the complete list of SSO user IDs
|
||||
* @param userIds - The list of SSO user IDs to set
|
||||
* @returns true if a restart is required, false otherwise
|
||||
*/
|
||||
async setSsoUsers(userIds: string[]): Promise<boolean> {
|
||||
const currentUsers = await this.getSsoUsers();
|
||||
const currentUserSet = new Set(currentUsers);
|
||||
const newUserSet = new Set(userIds);
|
||||
|
||||
// If there's no change, no need to update
|
||||
if (newUserSet.symmetricDifference(currentUserSet).size === 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Validate user IDs
|
||||
const uuidRegex =
|
||||
/^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/;
|
||||
const invalidUserIds = userIds.filter((id) => !uuidRegex.test(id));
|
||||
if (invalidUserIds.length > 0) {
|
||||
throw new GraphQLError(`Invalid SSO user ID's: ${invalidUserIds.join(', ')}`);
|
||||
}
|
||||
|
||||
// Update the config
|
||||
this.configService.set(this.ssoSubIdsConfigKey, userIds);
|
||||
|
||||
// Handle file modification if available
|
||||
if (this.fileModificationService) {
|
||||
// If going from 0 to 1+ users, apply the SSO modification
|
||||
if (currentUserSet.size === 0 && newUserSet.size > 0) {
|
||||
try {
|
||||
await this.fileModificationService.applyModificationById('sso');
|
||||
this.logger.log('Applied SSO file modification after adding SSO users');
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to apply SSO file modification', error);
|
||||
}
|
||||
}
|
||||
// If going from 1+ to 0 users, rollback the SSO modification
|
||||
else if (currentUserSet.size > 0 && newUserSet.size === 0) {
|
||||
try {
|
||||
await this.fileModificationService.rollbackModificationById('sso');
|
||||
this.logger.log('Rolled back SSO file modification after removing all SSO users');
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to rollback SSO file modification', error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// No restart required - file modifications are applied immediately
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -1,154 +0,0 @@
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import { InquirerService } from 'nest-commander';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { RestartCommand } from '@app/unraid-api/cli/restart.command.js';
|
||||
import { AddSSOUserCommand } from '@app/unraid-api/cli/sso/add-sso-user.command.js';
|
||||
|
||||
// Mock services
|
||||
const mockInternalClient = {
|
||||
getClient: vi.fn(),
|
||||
};
|
||||
|
||||
const mockLogger = {
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
};
|
||||
|
||||
const mockRestartCommand = {
|
||||
run: vi.fn(),
|
||||
};
|
||||
|
||||
const mockInquirerService = {
|
||||
prompt: vi.fn(),
|
||||
};
|
||||
|
||||
describe('AddSSOUserCommand', () => {
|
||||
let command: AddSSOUserCommand;
|
||||
|
||||
beforeEach(async () => {
|
||||
const module = await Test.createTestingModule({
|
||||
providers: [
|
||||
AddSSOUserCommand,
|
||||
{ provide: CliInternalClientService, useValue: mockInternalClient },
|
||||
{ provide: LogService, useValue: mockLogger },
|
||||
{ provide: RestartCommand, useValue: mockRestartCommand },
|
||||
{ provide: InquirerService, useValue: mockInquirerService },
|
||||
],
|
||||
}).compile();
|
||||
|
||||
command = module.get<AddSSOUserCommand>(AddSSOUserCommand);
|
||||
|
||||
// Clear mocks
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should add a new SSO user successfully', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
settings: {
|
||||
api: {
|
||||
ssoSubIds: ['existing-user-id'],
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
mutate: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
updateSettings: {
|
||||
restartRequired: false,
|
||||
values: {},
|
||||
},
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
disclaimer: 'y',
|
||||
username: 'new-user-id',
|
||||
});
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockClient.query).toHaveBeenCalled();
|
||||
expect(mockClient.mutate).toHaveBeenCalledWith({
|
||||
mutation: expect.anything(),
|
||||
variables: {
|
||||
input: {
|
||||
api: {
|
||||
ssoSubIds: ['existing-user-id', 'new-user-id'],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(mockLogger.info).toHaveBeenCalledWith('User added: new-user-id');
|
||||
expect(mockLogger.info).not.toHaveBeenCalledWith('Restarting the API');
|
||||
expect(mockRestartCommand.run).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not add user if disclaimer is not accepted', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn(),
|
||||
mutate: vi.fn(),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
disclaimer: 'n',
|
||||
username: 'new-user-id',
|
||||
});
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockClient.query).not.toHaveBeenCalled();
|
||||
expect(mockClient.mutate).not.toHaveBeenCalled();
|
||||
expect(mockRestartCommand.run).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not add user if user already exists', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
settings: {
|
||||
api: {
|
||||
ssoSubIds: ['existing-user-id'],
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
mutate: vi.fn(),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
disclaimer: 'y',
|
||||
username: 'existing-user-id',
|
||||
});
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockClient.query).toHaveBeenCalled();
|
||||
expect(mockClient.mutate).not.toHaveBeenCalled();
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
'User existing-user-id already exists in SSO users'
|
||||
);
|
||||
expect(mockRestartCommand.run).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle errors gracefully', async () => {
|
||||
mockInternalClient.getClient.mockRejectedValue(new Error('Connection failed'));
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
disclaimer: 'y',
|
||||
username: 'new-user-id',
|
||||
});
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith('Error adding user:', expect.any(Error));
|
||||
});
|
||||
});
|
||||
192
api/src/unraid-api/cli/__test__/api-key.command.test.ts
Normal file
192
api/src/unraid-api/cli/__test__/api-key.command.test.ts
Normal file
@@ -0,0 +1,192 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import { InquirerService } from 'nest-commander';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
|
||||
import { AddApiKeyQuestionSet } from '@app/unraid-api/cli/apikey/add-api-key.questions.js';
|
||||
import { ApiKeyCommand } from '@app/unraid-api/cli/apikey/api-key.command.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
|
||||
describe('ApiKeyCommand', () => {
|
||||
let command: ApiKeyCommand;
|
||||
let apiKeyService: ApiKeyService;
|
||||
let logService: LogService;
|
||||
let inquirerService: InquirerService;
|
||||
let questionSet: AddApiKeyQuestionSet;
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
ApiKeyCommand,
|
||||
AddApiKeyQuestionSet,
|
||||
{
|
||||
provide: ApiKeyService,
|
||||
useValue: {
|
||||
findByField: vi.fn(),
|
||||
create: vi.fn(),
|
||||
findAll: vi.fn(),
|
||||
deleteApiKeys: vi.fn(),
|
||||
convertRolesStringArrayToRoles: vi.fn((roles) => roles),
|
||||
convertPermissionsStringArrayToPermissions: vi.fn((perms) => perms),
|
||||
getAllValidPermissions: vi.fn(() => []),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: LogService,
|
||||
useValue: {
|
||||
log: vi.fn(),
|
||||
error: vi.fn(),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: InquirerService,
|
||||
useValue: {
|
||||
prompt: vi.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
command = module.get<ApiKeyCommand>(ApiKeyCommand);
|
||||
apiKeyService = module.get<ApiKeyService>(ApiKeyService);
|
||||
logService = module.get<LogService>(LogService);
|
||||
inquirerService = module.get<InquirerService>(InquirerService);
|
||||
questionSet = module.get<AddApiKeyQuestionSet>(AddApiKeyQuestionSet);
|
||||
});
|
||||
|
||||
describe('AddApiKeyQuestionSet', () => {
|
||||
describe('shouldAskOverwrite', () => {
|
||||
it('should return true when an API key with the given name exists', () => {
|
||||
vi.mocked(apiKeyService.findByField).mockReturnValue({
|
||||
key: 'existing-key',
|
||||
name: 'test-key',
|
||||
description: 'Test key',
|
||||
roles: [],
|
||||
permissions: [],
|
||||
} as any);
|
||||
|
||||
const result = questionSet.shouldAskOverwrite({ name: 'test-key' });
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(apiKeyService.findByField).toHaveBeenCalledWith('name', 'test-key');
|
||||
});
|
||||
|
||||
it('should return false when no API key with the given name exists', () => {
|
||||
vi.mocked(apiKeyService.findByField).mockReturnValue(null);
|
||||
|
||||
const result = questionSet.shouldAskOverwrite({ name: 'non-existent-key' });
|
||||
|
||||
expect(result).toBe(false);
|
||||
expect(apiKeyService.findByField).toHaveBeenCalledWith('name', 'non-existent-key');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('run', () => {
|
||||
it('should find and return existing key when not creating', async () => {
|
||||
const mockKey = { key: 'test-api-key-123', name: 'test-key' };
|
||||
vi.mocked(apiKeyService.findByField).mockReturnValue(mockKey as any);
|
||||
|
||||
await command.run([], { name: 'test-key', create: false });
|
||||
|
||||
expect(apiKeyService.findByField).toHaveBeenCalledWith('name', 'test-key');
|
||||
expect(logService.log).toHaveBeenCalledWith('test-api-key-123');
|
||||
});
|
||||
|
||||
it('should create new key when key does not exist and create flag is set', async () => {
|
||||
vi.mocked(apiKeyService.findByField).mockReturnValue(null);
|
||||
vi.mocked(apiKeyService.create).mockResolvedValue({ key: 'new-api-key-456' } as any);
|
||||
|
||||
await command.run([], {
|
||||
name: 'new-key',
|
||||
create: true,
|
||||
roles: ['ADMIN'] as any,
|
||||
description: 'Test description',
|
||||
});
|
||||
|
||||
expect(apiKeyService.create).toHaveBeenCalledWith({
|
||||
name: 'new-key',
|
||||
description: 'Test description',
|
||||
roles: ['ADMIN'],
|
||||
permissions: undefined,
|
||||
overwrite: false,
|
||||
});
|
||||
expect(logService.log).toHaveBeenCalledWith('new-api-key-456');
|
||||
});
|
||||
|
||||
it('should error when key exists and overwrite is not set in non-interactive mode', async () => {
|
||||
const mockKey = { key: 'existing-key', name: 'test-key' };
|
||||
vi.mocked(apiKeyService.findByField)
|
||||
.mockReturnValueOnce(null) // First call in line 131
|
||||
.mockReturnValueOnce(mockKey as any); // Second call in non-interactive check
|
||||
const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => {
|
||||
throw new Error('process.exit');
|
||||
});
|
||||
|
||||
await expect(
|
||||
command.run([], {
|
||||
name: 'test-key',
|
||||
create: true,
|
||||
roles: ['ADMIN'] as any,
|
||||
})
|
||||
).rejects.toThrow();
|
||||
|
||||
expect(logService.error).toHaveBeenCalledWith(
|
||||
"API key with name 'test-key' already exists. Use --overwrite to replace it."
|
||||
);
|
||||
expect(exitSpy).toHaveBeenCalledWith(1);
|
||||
exitSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('should create key with overwrite when key exists and overwrite is set', async () => {
|
||||
const mockKey = { key: 'existing-key', name: 'test-key' };
|
||||
vi.mocked(apiKeyService.findByField)
|
||||
.mockReturnValueOnce(null) // First call in line 131
|
||||
.mockReturnValueOnce(mockKey as any); // Second call in non-interactive check
|
||||
vi.mocked(apiKeyService.create).mockResolvedValue({ key: 'overwritten-key' } as any);
|
||||
|
||||
await command.run([], {
|
||||
name: 'test-key',
|
||||
create: true,
|
||||
roles: ['ADMIN'] as any,
|
||||
overwrite: true,
|
||||
});
|
||||
|
||||
expect(apiKeyService.create).toHaveBeenCalledWith({
|
||||
name: 'test-key',
|
||||
description: 'CLI generated key: test-key',
|
||||
roles: ['ADMIN'],
|
||||
permissions: undefined,
|
||||
overwrite: true,
|
||||
});
|
||||
expect(logService.log).toHaveBeenCalledWith('overwritten-key');
|
||||
});
|
||||
|
||||
it('should prompt for missing fields when creating without sufficient info', async () => {
|
||||
vi.mocked(apiKeyService.findByField).mockReturnValue(null);
|
||||
vi.mocked(inquirerService.prompt).mockResolvedValue({
|
||||
name: 'prompted-key',
|
||||
roles: ['USER'],
|
||||
permissions: [],
|
||||
description: 'Prompted description',
|
||||
overwrite: false,
|
||||
} as any);
|
||||
vi.mocked(apiKeyService.create).mockResolvedValue({ key: 'prompted-api-key' } as any);
|
||||
|
||||
await command.run([], { name: '', create: true });
|
||||
|
||||
expect(inquirerService.prompt).toHaveBeenCalledWith('add-api-key', {
|
||||
name: '',
|
||||
create: true,
|
||||
});
|
||||
expect(apiKeyService.create).toHaveBeenCalledWith({
|
||||
name: 'prompted-key',
|
||||
description: 'Prompted description',
|
||||
roles: ['USER'],
|
||||
permissions: [],
|
||||
overwrite: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,9 +1,10 @@
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import type { CanonicalInternalClientService } from '@unraid/shared';
|
||||
import { CANONICAL_INTERNAL_CLIENT_TOKEN } from '@unraid/shared';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ApiReportService } from '@app/unraid-api/cli/api-report.service.js';
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import {
|
||||
CONNECT_STATUS_QUERY,
|
||||
@@ -40,7 +41,7 @@ describe('ApiReportService', () => {
|
||||
providers: [
|
||||
ApiReportService,
|
||||
{ provide: LogService, useValue: mockLogService },
|
||||
{ provide: CliInternalClientService, useValue: mockInternalClientService },
|
||||
{ provide: CANONICAL_INTERNAL_CLIENT_TOKEN, useValue: mockInternalClientService },
|
||||
],
|
||||
}).compile();
|
||||
|
||||
@@ -64,9 +65,13 @@ describe('ApiReportService', () => {
|
||||
uuid: 'test-uuid',
|
||||
},
|
||||
versions: {
|
||||
unraid: '6.12.0',
|
||||
kernel: '5.19.17',
|
||||
openssl: '3.0.8',
|
||||
core: {
|
||||
unraid: '6.12.0',
|
||||
kernel: '5.19.17',
|
||||
},
|
||||
packages: {
|
||||
openssl: '3.0.8',
|
||||
},
|
||||
},
|
||||
},
|
||||
config: {
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
import { access, readFile, unlink, writeFile } from 'fs/promises';
|
||||
|
||||
import type { CanonicalInternalClientService } from '@unraid/shared';
|
||||
import { CANONICAL_INTERNAL_CLIENT_TOKEN } from '@unraid/shared';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { DeveloperToolsService } from '@app/unraid-api/cli/developer/developer-tools.service.js';
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { RestartCommand } from '@app/unraid-api/cli/restart.command.js';
|
||||
|
||||
@@ -15,7 +16,7 @@ describe('DeveloperToolsService', () => {
|
||||
let service: DeveloperToolsService;
|
||||
let logService: LogService;
|
||||
let restartCommand: RestartCommand;
|
||||
let internalClient: CliInternalClientService;
|
||||
let internalClient: CanonicalInternalClientService;
|
||||
|
||||
const mockClient = {
|
||||
mutate: vi.fn(),
|
||||
@@ -42,7 +43,7 @@ describe('DeveloperToolsService', () => {
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: CliInternalClientService,
|
||||
provide: CANONICAL_INTERNAL_CLIENT_TOKEN,
|
||||
useValue: {
|
||||
getClient: vi.fn().mockResolvedValue(mockClient),
|
||||
},
|
||||
@@ -53,7 +54,7 @@ describe('DeveloperToolsService', () => {
|
||||
service = module.get<DeveloperToolsService>(DeveloperToolsService);
|
||||
logService = module.get<LogService>(LogService);
|
||||
restartCommand = module.get<RestartCommand>(RestartCommand);
|
||||
internalClient = module.get<CliInternalClientService>(CliInternalClientService);
|
||||
internalClient = module.get<CanonicalInternalClientService>(CANONICAL_INTERNAL_CLIENT_TOKEN);
|
||||
});
|
||||
|
||||
describe('setSandboxMode', () => {
|
||||
|
||||
@@ -1,86 +0,0 @@
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { ListSSOUserCommand } from '@app/unraid-api/cli/sso/list-sso-user.command.js';
|
||||
|
||||
// Mock services
|
||||
const mockInternalClient = {
|
||||
getClient: vi.fn(),
|
||||
};
|
||||
|
||||
const mockLogger = {
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
};
|
||||
|
||||
describe('ListSSOUserCommand', () => {
|
||||
let command: ListSSOUserCommand;
|
||||
|
||||
beforeEach(async () => {
|
||||
const module = await Test.createTestingModule({
|
||||
providers: [
|
||||
ListSSOUserCommand,
|
||||
{ provide: CliInternalClientService, useValue: mockInternalClient },
|
||||
{ provide: LogService, useValue: mockLogger },
|
||||
],
|
||||
}).compile();
|
||||
|
||||
command = module.get<ListSSOUserCommand>(ListSSOUserCommand);
|
||||
|
||||
// Clear mocks
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should list all SSO users', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
settings: {
|
||||
api: {
|
||||
ssoSubIds: ['user-1', 'user-2', 'user-3'],
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockClient.query).toHaveBeenCalledWith({
|
||||
query: expect.anything(),
|
||||
});
|
||||
expect(mockLogger.info).toHaveBeenCalledWith('user-1\nuser-2\nuser-3');
|
||||
});
|
||||
|
||||
it('should display message when no users found', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
settings: {
|
||||
api: {
|
||||
ssoSubIds: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockClient.query).toHaveBeenCalled();
|
||||
expect(mockLogger.info).toHaveBeenCalledWith('No SSO users found');
|
||||
});
|
||||
|
||||
it('should handle errors gracefully', async () => {
|
||||
mockInternalClient.getClient.mockRejectedValue(new Error('Connection failed'));
|
||||
|
||||
await expect(command.run([])).rejects.toThrow('Connection failed');
|
||||
});
|
||||
});
|
||||
@@ -1,35 +1,65 @@
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import { InquirerService } from 'nest-commander';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { ILogService, LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import {
|
||||
InstallPluginCommand,
|
||||
ListPluginCommand,
|
||||
RemovePluginCommand,
|
||||
} from '@app/unraid-api/cli/plugins/plugin.command.js';
|
||||
import { RestartCommand } from '@app/unraid-api/cli/restart.command.js';
|
||||
import { ApiConfigPersistence } from '@app/unraid-api/config/api-config.module.js';
|
||||
import { PluginManagementService } from '@app/unraid-api/plugin/plugin-management.service.js';
|
||||
import { PluginService } from '@app/unraid-api/plugin/plugin.service.js';
|
||||
|
||||
// Mock services
|
||||
const mockInternalClient = {
|
||||
getClient: vi.fn(),
|
||||
};
|
||||
|
||||
const mockLogger = {
|
||||
const mockLogger: ILogService = {
|
||||
clear: vi.fn(),
|
||||
shouldLog: vi.fn(),
|
||||
log: vi.fn(),
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
table: vi.fn(),
|
||||
always: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
trace: vi.fn(),
|
||||
};
|
||||
|
||||
const mockRestartCommand = {
|
||||
run: vi.fn(),
|
||||
};
|
||||
|
||||
const mockPluginManagementService = {
|
||||
addPlugin: vi.fn(),
|
||||
addBundledPlugin: vi.fn(),
|
||||
removePlugin: vi.fn(),
|
||||
removeBundledPlugin: vi.fn(),
|
||||
plugins: [] as string[],
|
||||
};
|
||||
|
||||
const mockInquirerService = {
|
||||
prompt: vi.fn(),
|
||||
};
|
||||
|
||||
const mockApiConfigPersistence = {
|
||||
persist: vi.fn(),
|
||||
};
|
||||
|
||||
vi.mock('@app/unraid-api/plugin/plugin.service.js', () => ({
|
||||
PluginService: {
|
||||
listPlugins: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
describe('Plugin Commands', () => {
|
||||
beforeEach(() => {
|
||||
// Clear mocks before each test
|
||||
vi.clearAllMocks();
|
||||
// Reset process.exitCode
|
||||
process.exitCode = 0;
|
||||
});
|
||||
|
||||
describe('InstallPluginCommand', () => {
|
||||
@@ -39,9 +69,10 @@ describe('Plugin Commands', () => {
|
||||
const module = await Test.createTestingModule({
|
||||
providers: [
|
||||
InstallPluginCommand,
|
||||
{ provide: CliInternalClientService, useValue: mockInternalClient },
|
||||
{ provide: LogService, useValue: mockLogger },
|
||||
{ provide: RestartCommand, useValue: mockRestartCommand },
|
||||
{ provide: PluginManagementService, useValue: mockPluginManagementService },
|
||||
{ provide: ApiConfigPersistence, useValue: mockApiConfigPersistence },
|
||||
],
|
||||
}).compile();
|
||||
|
||||
@@ -49,88 +80,39 @@ describe('Plugin Commands', () => {
|
||||
});
|
||||
|
||||
it('should install a plugin successfully', async () => {
|
||||
const mockClient = {
|
||||
mutate: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
addPlugin: false, // No manual restart required
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
|
||||
await command.run(['@unraid/plugin-example'], { bundled: false, restart: true });
|
||||
|
||||
expect(mockClient.mutate).toHaveBeenCalledWith({
|
||||
mutation: expect.anything(),
|
||||
variables: {
|
||||
input: {
|
||||
names: ['@unraid/plugin-example'],
|
||||
bundled: false,
|
||||
restart: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(mockPluginManagementService.addPlugin).toHaveBeenCalledWith('@unraid/plugin-example');
|
||||
expect(mockLogger.log).toHaveBeenCalledWith('Added plugin @unraid/plugin-example');
|
||||
expect(mockRestartCommand.run).not.toHaveBeenCalled(); // Because addPlugin returned false
|
||||
expect(mockApiConfigPersistence.persist).toHaveBeenCalled();
|
||||
expect(mockRestartCommand.run).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle bundled plugin installation', async () => {
|
||||
const mockClient = {
|
||||
mutate: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
addPlugin: true, // Manual restart required
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
|
||||
await command.run(['@unraid/bundled-plugin'], { bundled: true, restart: true });
|
||||
|
||||
expect(mockClient.mutate).toHaveBeenCalledWith({
|
||||
mutation: expect.anything(),
|
||||
variables: {
|
||||
input: {
|
||||
names: ['@unraid/bundled-plugin'],
|
||||
bundled: true,
|
||||
restart: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(mockPluginManagementService.addBundledPlugin).toHaveBeenCalledWith(
|
||||
'@unraid/bundled-plugin'
|
||||
);
|
||||
expect(mockLogger.log).toHaveBeenCalledWith('Added bundled plugin @unraid/bundled-plugin');
|
||||
expect(mockRestartCommand.run).toHaveBeenCalled(); // Because addPlugin returned true
|
||||
expect(mockApiConfigPersistence.persist).toHaveBeenCalled();
|
||||
expect(mockRestartCommand.run).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not restart when restart option is false', async () => {
|
||||
const mockClient = {
|
||||
mutate: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
addPlugin: true,
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
|
||||
await command.run(['@unraid/plugin'], { bundled: false, restart: false });
|
||||
|
||||
expect(mockPluginManagementService.addPlugin).toHaveBeenCalledWith('@unraid/plugin');
|
||||
expect(mockApiConfigPersistence.persist).toHaveBeenCalled();
|
||||
expect(mockRestartCommand.run).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle errors', async () => {
|
||||
mockInternalClient.getClient.mockRejectedValue(new Error('Connection failed'));
|
||||
|
||||
await command.run(['@unraid/plugin'], { bundled: false, restart: true });
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith('Failed to add plugin:', expect.any(Error));
|
||||
expect(process.exitCode).toBe(1);
|
||||
});
|
||||
|
||||
it('should error when no package name provided', async () => {
|
||||
await command.run([], { bundled: false, restart: true });
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith('Package name is required.');
|
||||
expect(mockApiConfigPersistence.persist).not.toHaveBeenCalled();
|
||||
expect(mockRestartCommand.run).not.toHaveBeenCalled();
|
||||
expect(process.exitCode).toBe(1);
|
||||
});
|
||||
});
|
||||
@@ -142,57 +124,62 @@ describe('Plugin Commands', () => {
|
||||
const module = await Test.createTestingModule({
|
||||
providers: [
|
||||
RemovePluginCommand,
|
||||
{ provide: CliInternalClientService, useValue: mockInternalClient },
|
||||
{ provide: LogService, useValue: mockLogger },
|
||||
{ provide: PluginManagementService, useValue: mockPluginManagementService },
|
||||
{ provide: RestartCommand, useValue: mockRestartCommand },
|
||||
{ provide: InquirerService, useValue: mockInquirerService },
|
||||
{ provide: ApiConfigPersistence, useValue: mockApiConfigPersistence },
|
||||
],
|
||||
}).compile();
|
||||
|
||||
command = module.get<RemovePluginCommand>(RemovePluginCommand);
|
||||
});
|
||||
|
||||
it('should remove a plugin successfully', async () => {
|
||||
const mockClient = {
|
||||
mutate: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
removePlugin: false, // No manual restart required
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
|
||||
await command.run(['@unraid/plugin-example'], { bundled: false, restart: true });
|
||||
|
||||
expect(mockClient.mutate).toHaveBeenCalledWith({
|
||||
mutation: expect.anything(),
|
||||
variables: {
|
||||
input: {
|
||||
names: ['@unraid/plugin-example'],
|
||||
bundled: false,
|
||||
restart: true,
|
||||
},
|
||||
},
|
||||
it('should remove plugins successfully', async () => {
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
plugins: ['@unraid/plugin-example', '@unraid/plugin-test'],
|
||||
restart: true,
|
||||
});
|
||||
|
||||
await command.run([], { restart: true });
|
||||
|
||||
expect(mockPluginManagementService.removePlugin).toHaveBeenCalledWith(
|
||||
'@unraid/plugin-example',
|
||||
'@unraid/plugin-test'
|
||||
);
|
||||
expect(mockLogger.log).toHaveBeenCalledWith('Removed plugin @unraid/plugin-example');
|
||||
expect(mockLogger.log).toHaveBeenCalledWith('Removed plugin @unraid/plugin-test');
|
||||
expect(mockApiConfigPersistence.persist).toHaveBeenCalled();
|
||||
expect(mockRestartCommand.run).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle when no plugins are selected', async () => {
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
plugins: [],
|
||||
restart: true,
|
||||
});
|
||||
|
||||
await command.run([], { restart: true });
|
||||
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith('No plugins selected for removal.');
|
||||
expect(mockPluginManagementService.removePlugin).not.toHaveBeenCalled();
|
||||
expect(mockApiConfigPersistence.persist).not.toHaveBeenCalled();
|
||||
expect(mockRestartCommand.run).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle removing bundled plugins', async () => {
|
||||
const mockClient = {
|
||||
mutate: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
removePlugin: true, // Manual restart required
|
||||
},
|
||||
}),
|
||||
};
|
||||
it('should skip restart when --no-restart is specified', async () => {
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
plugins: ['@unraid/plugin-example'],
|
||||
restart: false,
|
||||
});
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
await command.run([], { restart: false });
|
||||
|
||||
await command.run(['@unraid/bundled-plugin'], { bundled: true, restart: true });
|
||||
|
||||
expect(mockLogger.log).toHaveBeenCalledWith('Removed bundled plugin @unraid/bundled-plugin');
|
||||
expect(mockRestartCommand.run).toHaveBeenCalled();
|
||||
expect(mockPluginManagementService.removePlugin).toHaveBeenCalledWith(
|
||||
'@unraid/plugin-example'
|
||||
);
|
||||
expect(mockApiConfigPersistence.persist).toHaveBeenCalled();
|
||||
expect(mockRestartCommand.run).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -203,8 +190,8 @@ describe('Plugin Commands', () => {
|
||||
const module = await Test.createTestingModule({
|
||||
providers: [
|
||||
ListPluginCommand,
|
||||
{ provide: CliInternalClientService, useValue: mockInternalClient },
|
||||
{ provide: LogService, useValue: mockLogger },
|
||||
{ provide: PluginManagementService, useValue: mockPluginManagementService },
|
||||
],
|
||||
}).compile();
|
||||
|
||||
@@ -212,63 +199,37 @@ describe('Plugin Commands', () => {
|
||||
});
|
||||
|
||||
it('should list installed plugins', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
plugins: [
|
||||
{
|
||||
name: '@unraid/plugin-1',
|
||||
version: '1.0.0',
|
||||
hasApiModule: true,
|
||||
hasCliModule: false,
|
||||
},
|
||||
{
|
||||
name: '@unraid/plugin-2',
|
||||
version: '2.0.0',
|
||||
hasApiModule: true,
|
||||
hasCliModule: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
vi.mocked(PluginService.listPlugins).mockResolvedValue([
|
||||
['@unraid/plugin-1', '1.0.0'],
|
||||
['@unraid/plugin-2', '2.0.0'],
|
||||
]);
|
||||
mockPluginManagementService.plugins = ['@unraid/plugin-1', '@unraid/plugin-2'];
|
||||
|
||||
await command.run();
|
||||
|
||||
expect(mockClient.query).toHaveBeenCalledWith({
|
||||
query: expect.anything(),
|
||||
});
|
||||
expect(mockLogger.log).toHaveBeenCalledWith('Installed plugins:\n');
|
||||
expect(mockLogger.log).toHaveBeenCalledWith('☑️ @unraid/plugin-1@1.0.0 [API]');
|
||||
expect(mockLogger.log).toHaveBeenCalledWith('☑️ @unraid/plugin-2@2.0.0 [API, CLI]');
|
||||
expect(mockLogger.log).toHaveBeenCalledWith('☑️ @unraid/plugin-1@1.0.0');
|
||||
expect(mockLogger.log).toHaveBeenCalledWith('☑️ @unraid/plugin-2@2.0.0');
|
||||
expect(mockLogger.log).toHaveBeenCalledWith();
|
||||
});
|
||||
|
||||
it('should handle no plugins installed', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
plugins: [],
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
vi.mocked(PluginService.listPlugins).mockResolvedValue([]);
|
||||
mockPluginManagementService.plugins = [];
|
||||
|
||||
await command.run();
|
||||
|
||||
expect(mockLogger.log).toHaveBeenCalledWith('No plugins installed.');
|
||||
});
|
||||
|
||||
it('should handle errors', async () => {
|
||||
mockInternalClient.getClient.mockRejectedValue(new Error('Connection failed'));
|
||||
it('should warn about plugins not installed', async () => {
|
||||
vi.mocked(PluginService.listPlugins).mockResolvedValue([['@unraid/plugin-1', '1.0.0']]);
|
||||
mockPluginManagementService.plugins = ['@unraid/plugin-1', '@unraid/plugin-2'];
|
||||
|
||||
await command.run();
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith('Failed to list plugins:', expect.any(Error));
|
||||
expect(process.exitCode).toBe(1);
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith('1 plugins are not installed:');
|
||||
expect(mockLogger.table).toHaveBeenCalledWith('warn', ['@unraid/plugin-2']);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,186 +0,0 @@
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import { InquirerService } from 'nest-commander';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { RestartCommand } from '@app/unraid-api/cli/restart.command.js';
|
||||
import { RemoveSSOUserCommand } from '@app/unraid-api/cli/sso/remove-sso-user.command.js';
|
||||
|
||||
// Mock services
|
||||
const mockInternalClient = {
|
||||
getClient: vi.fn(),
|
||||
};
|
||||
|
||||
const mockLogger = {
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
};
|
||||
|
||||
const mockRestartCommand = {
|
||||
run: vi.fn(),
|
||||
};
|
||||
|
||||
const mockInquirerService = {
|
||||
prompt: vi.fn(),
|
||||
};
|
||||
|
||||
describe('RemoveSSOUserCommand', () => {
|
||||
let command: RemoveSSOUserCommand;
|
||||
|
||||
beforeEach(async () => {
|
||||
const module = await Test.createTestingModule({
|
||||
providers: [
|
||||
RemoveSSOUserCommand,
|
||||
{ provide: CliInternalClientService, useValue: mockInternalClient },
|
||||
{ provide: LogService, useValue: mockLogger },
|
||||
{ provide: RestartCommand, useValue: mockRestartCommand },
|
||||
{ provide: InquirerService, useValue: mockInquirerService },
|
||||
],
|
||||
}).compile();
|
||||
|
||||
command = module.get<RemoveSSOUserCommand>(RemoveSSOUserCommand);
|
||||
|
||||
// Clear mocks
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should remove a specific SSO user successfully', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
settings: {
|
||||
api: {
|
||||
ssoSubIds: ['user-1', 'user-2', 'user-3'],
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
mutate: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
updateSettings: {
|
||||
restartRequired: true,
|
||||
values: {},
|
||||
},
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
username: 'user-2',
|
||||
});
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockClient.query).toHaveBeenCalled();
|
||||
expect(mockClient.mutate).toHaveBeenCalledWith({
|
||||
mutation: expect.anything(),
|
||||
variables: {
|
||||
input: {
|
||||
api: {
|
||||
ssoSubIds: ['user-1', 'user-3'],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(mockLogger.info).toHaveBeenCalledWith('User removed: user-2');
|
||||
expect(mockLogger.info).toHaveBeenCalledWith('Restarting the API');
|
||||
expect(mockRestartCommand.run).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should remove all SSO users when "all" is selected', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
settings: {
|
||||
api: {
|
||||
ssoSubIds: ['user-1', 'user-2', 'user-3'],
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
mutate: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
updateSettings: {
|
||||
restartRequired: true,
|
||||
values: {},
|
||||
},
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
username: 'all',
|
||||
});
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockClient.query).toHaveBeenCalled();
|
||||
expect(mockClient.mutate).toHaveBeenCalledWith({
|
||||
mutation: expect.anything(),
|
||||
variables: {
|
||||
input: {
|
||||
api: {
|
||||
ssoSubIds: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(mockLogger.info).toHaveBeenCalledWith('All users removed from SSO');
|
||||
expect(mockRestartCommand.run).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not remove user if user does not exist', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
settings: {
|
||||
api: {
|
||||
ssoSubIds: ['user-1', 'user-3'],
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
mutate: vi.fn(),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
username: 'user-2',
|
||||
});
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockClient.query).toHaveBeenCalled();
|
||||
expect(mockClient.mutate).not.toHaveBeenCalled();
|
||||
expect(mockLogger.error).toHaveBeenCalledWith('User user-2 not found in SSO users');
|
||||
expect(mockRestartCommand.run).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should exit when no SSO users are found', async () => {
|
||||
const processExitSpy = vi.spyOn(process, 'exit').mockImplementation(() => {
|
||||
throw new Error('process.exit');
|
||||
});
|
||||
|
||||
const error = new Error('No SSO Users Found');
|
||||
(error as any).name = 'NoSSOUsersFoundError';
|
||||
mockInquirerService.prompt.mockRejectedValue(error);
|
||||
|
||||
try {
|
||||
await command.run([]);
|
||||
} catch (error) {
|
||||
// Expected to throw due to process.exit
|
||||
}
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
'Failed to fetch SSO users: %s',
|
||||
'No SSO Users Found'
|
||||
);
|
||||
expect(processExitSpy).toHaveBeenCalledWith(1);
|
||||
|
||||
processExitSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
@@ -2,17 +2,23 @@ import { Test } from '@nestjs/testing';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import type { ILogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { ApiReportService } from '@app/unraid-api/cli/api-report.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { ReportCommand } from '@app/unraid-api/cli/report.command.js';
|
||||
|
||||
// Mock log service
|
||||
const mockLogService = {
|
||||
debug: vi.fn(),
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
const mockLogService: ILogService = {
|
||||
shouldLog: vi.fn(),
|
||||
clear: vi.fn(),
|
||||
always: vi.fn(),
|
||||
error: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
log: vi.fn(),
|
||||
table: vi.fn(),
|
||||
info: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
trace: vi.fn(),
|
||||
};
|
||||
|
||||
// Mock ApiReportService
|
||||
@@ -101,7 +107,7 @@ describe('ReportCommand', () => {
|
||||
|
||||
// Verify report was logged
|
||||
expect(mockLogService.clear).toHaveBeenCalled();
|
||||
expect(mockLogService.info).toHaveBeenCalledWith(JSON.stringify(mockReport, null, 2));
|
||||
expect(mockLogService.always).toHaveBeenCalledWith(JSON.stringify(mockReport, null, 2));
|
||||
});
|
||||
|
||||
it('should handle API not running gracefully', async () => {
|
||||
@@ -113,7 +119,7 @@ describe('ReportCommand', () => {
|
||||
expect(mockApiReportService.generateReport).not.toHaveBeenCalled();
|
||||
|
||||
// Verify warning was logged
|
||||
expect(mockLogService.warn).toHaveBeenCalledWith(
|
||||
expect(mockLogService.always).toHaveBeenCalledWith(
|
||||
expect.stringContaining('API is not running')
|
||||
);
|
||||
});
|
||||
@@ -128,7 +134,7 @@ describe('ReportCommand', () => {
|
||||
expect(mockLogService.debug).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Error generating report via GraphQL')
|
||||
);
|
||||
expect(mockLogService.warn).toHaveBeenCalledWith(
|
||||
expect(mockLogService.always).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Failed to generate system report')
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1,44 +0,0 @@
|
||||
import { Inject, Injectable, Logger, OnModuleInit } from '@nestjs/common';
|
||||
|
||||
import type { ApiKeyService } from '@unraid/shared/services/api-key.js';
|
||||
import { Role } from '@unraid/shared/graphql.model.js';
|
||||
import { API_KEY_SERVICE_TOKEN } from '@unraid/shared/tokens.js';
|
||||
|
||||
/**
|
||||
* Service that creates and manages the admin API key used by CLI commands.
|
||||
* Uses the standard API key storage location via helper methods in ApiKeyService.
|
||||
*/
|
||||
@Injectable()
|
||||
export class AdminKeyService implements OnModuleInit {
|
||||
private readonly logger = new Logger(AdminKeyService.name);
|
||||
private static readonly ADMIN_KEY_NAME = 'CliInternal';
|
||||
private static readonly ADMIN_KEY_DESCRIPTION =
|
||||
'Internal admin API key used by CLI commands for system operations';
|
||||
|
||||
constructor(
|
||||
@Inject(API_KEY_SERVICE_TOKEN)
|
||||
private readonly apiKeyService: ApiKeyService
|
||||
) {}
|
||||
|
||||
async onModuleInit() {
|
||||
try {
|
||||
await this.getOrCreateLocalAdminKey();
|
||||
this.logger.log('Admin API key initialized successfully');
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to initialize admin API key:', error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets or creates a local admin API key for CLI operations.
|
||||
* Uses the standard API key storage location.
|
||||
*/
|
||||
public async getOrCreateLocalAdminKey(): Promise<string> {
|
||||
return this.apiKeyService.ensureKey({
|
||||
name: AdminKeyService.ADMIN_KEY_NAME,
|
||||
description: AdminKeyService.ADMIN_KEY_DESCRIPTION,
|
||||
roles: [Role.ADMIN], // Full admin privileges for CLI operations
|
||||
legacyNames: ['CLI', 'Internal', 'CliAdmin'], // Clean up old keys
|
||||
});
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user