Mirror of https://github.com/unraid/api.git (synced 2026-01-06 00:30:22 -06:00)

Compare commits: 4.11.0-bui ... 4.15.0-bui (45 commits)
| Author | SHA1 | Date |
|---|---|---|
| | ac198d5d1a | |
| | f1c043fe5f | |
| | d0c66020e1 | |
| | 335f949b53 | |
| | 26aeca3624 | |
| | 2b4c2a264b | |
| | b7798b82f4 | |
| | 426283011a | |
| | effdbcf0f5 | |
| | 541b0edd35 | |
| | ce63d5dca2 | |
| | bcaacca061 | |
| | 0afc4e8e9a | |
| | 1a01696dc7 | |
| | 1bc5251310 | |
| | 3a10871918 | |
| | 58b5544bea | |
| | a4ff3c4092 | |
| | 1e0a54d9ef | |
| | 096fe98710 | |
| | 57217852a3 | |
| | 979a267bc5 | |
| | 96c120f9b2 | |
| | a2c5d2495f | |
| | b3216874fa | |
| | 27dbfde845 | |
| | 1a25fedd23 | |
| | ad6aa3b674 | |
| | 9c4e764c95 | |
| | 20c2d5b445 | |
| | 85a441b51d | |
| | c9577e9bf2 | |
| | 18b5209087 | |
| | ec8f4f38c8 | |
| | db0e725107 | |
| | 5afca5ecba | |
| | beab83b56e | |
| | 78997a02c6 | |
| | 3534d6fdd7 | |
| | 557b03f882 | |
| | 514a0ef560 | |
| | dfe352dfa1 | |
| | 8005b8c3b6 | |
| | d6fa102d06 | |
| | 52f22678e3 | |
@@ -10,4 +10,5 @@ alwaysApply: false
* Test suite is VITEST, do not use jest
  pnpm --filter ./api test
* Prefer to not mock simple dependencies
* For error testing, use `.rejects.toThrow()` without arguments - don't test exact error message strings unless the message format is specifically what you're testing
@@ -4,6 +4,10 @@ globs: **/*.test.ts,**/__test__/components/**/*.ts,**/__test__/store/**/*.ts,**/
alwaysApply: false
---

## General Testing Best Practices

- **Error Testing:** Use `.rejects.toThrow()` without arguments to test that functions throw errors. Don't test exact error message strings unless the message format is specifically what you're testing
- **Focus on Behavior:** Test what the code does, not implementation details like exact error message wording

## Vue Component Testing Best Practices

- This is a Nuxt.js app but we are testing with vitest outside of the Nuxt environment
- Nuxt is currently set to auto import, so some Vue files may need `computed` or `ref` imported
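A minimal Vitest sketch of the error-testing rule above; the `loadConfig` function is hypothetical and used only to illustrate the pattern:

```ts
import { describe, expect, it } from 'vitest';

// Hypothetical function, not part of the repo: it rejects when the file is missing.
async function loadConfig(path: string): Promise<void> {
  throw new Error(`config not found: ${path}`);
}

describe('loadConfig', () => {
  it('rejects when the config file is missing', async () => {
    // Assert that it throws, without pinning the exact message text.
    await expect(loadConfig('/does/not/exist')).rejects.toThrow();
  });
});
```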
8  .github/workflows/build-plugin.yml  (vendored)

@@ -45,7 +45,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
        uses: actions/checkout@v5
        with:
          fetch-depth: 0

@@ -88,19 +88,19 @@ jobs:
          pnpm install --frozen-lockfile --filter @unraid/connect-plugin

      - name: Download Unraid UI Components
        uses: actions/download-artifact@v4
        uses: actions/download-artifact@v5
        with:
          name: unraid-wc-ui
          path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/uui
          merge-multiple: true
      - name: Download Unraid Web Components
        uses: actions/download-artifact@v4
        uses: actions/download-artifact@v5
        with:
          pattern: unraid-wc-rich
          path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/nuxt
          merge-multiple: true
      - name: Download Unraid API
        uses: actions/download-artifact@v4
        uses: actions/download-artifact@v5
        with:
          name: unraid-api
          path: ${{ github.workspace }}/plugin/api/
89  .github/workflows/claude-code-review.yml  (vendored)

@@ -3,20 +3,30 @@ name: Claude Code Review
on:
  pull_request:
    types: [opened, synchronize]
    # Optional: Only run on specific file changes
    # paths:
    #   - "src/**/*.ts"
    #   - "src/**/*.tsx"
    #   - "src/**/*.js"
    #   - "src/**/*.jsx"
    # Skip reviews for non-code changes
    paths-ignore:
      - "**/*.md"
      - "**/package-lock.json"
      - "**/pnpm-lock.yaml"
      - "**/.gitignore"
      - "**/LICENSE"
      - "**/*.config.js"
      - "**/*.config.ts"
      - "**/tsconfig.json"
      - "**/.github/workflows/*.yml"
      - "**/docs/**"

jobs:
  claude-review:
    # Optional: Filter by PR author
    # if: |
    #   github.event.pull_request.user.login == 'external-contributor' ||
    #   github.event.pull_request.user.login == 'new-developer' ||
    #   github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR'
    # Skip review for bot PRs and WIP/skip-review PRs
    # Only run if changes are significant (>10 lines)
    if: |
      (github.event.pull_request.additions > 10 || github.event.pull_request.deletions > 10) &&
      !contains(github.event.pull_request.title, '[skip-review]') &&
      !contains(github.event.pull_request.title, '[WIP]') &&
      !endsWith(github.event.pull_request.user.login, '[bot]') &&
      github.event.pull_request.user.login != 'dependabot' &&
      github.event.pull_request.user.login != 'renovate'

    runs-on: ubuntu-latest
    permissions:

@@ -27,7 +37,7 @@ jobs:

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        uses: actions/checkout@v5
        with:
          fetch-depth: 1

@@ -42,31 +52,46 @@ jobs:

          # Direct prompt for automated review (no @claude mention needed)
          direct_prompt: |
            Please review this pull request and provide feedback on:
            - Code quality and best practices
            - Potential bugs or issues
            - Performance considerations
            - Security concerns
            - Test coverage
            IMPORTANT: Review ONLY the DIFF/CHANGESET - the actual lines that were added or modified in this PR.
            DO NOT review the entire file context, only analyze the specific changes being made.

            Be constructive and helpful in your feedback.
            Look for HIGH-PRIORITY issues in the CHANGED LINES ONLY:

            1. CRITICAL BUGS: Logic errors, null pointer issues, infinite loops, race conditions
            2. SECURITY: SQL injection, XSS, authentication bypass, exposed secrets, unsafe operations
            3. BREAKING CHANGES: API contract violations, removed exports, changed function signatures
            4. DATA LOSS RISKS: Destructive operations without safeguards, missing data validation

            DO NOT comment on:
            - Code that wasn't changed in this PR
            - Style, formatting, or documentation
            - Test coverage (unless tests are broken by the changes)
            - Minor optimizations or best practices
            - Existing code issues that weren't introduced by this PR

            If you find no critical issues in the DIFF, respond with: "✅ No critical issues found in changes"

            Keep response under 10 lines. Reference specific line numbers from the diff when reporting issues.

          # Optional: Use sticky comments to make Claude reuse the same comment on subsequent pushes to the same PR
          # use_sticky_comment: true
          use_sticky_comment: true

          # Optional: Customize review based on file types
          # Context-aware review based on PR characteristics
          # Uncomment to enable different review strategies based on context
          # direct_prompt: |
          #   Review this PR focusing on:
          #   - For TypeScript files: Type safety and proper interface usage
          #   - For API endpoints: Security, input validation, and error handling
          #   - For React components: Performance, accessibility, and best practices
          #   - For tests: Coverage, edge cases, and test quality

          # Optional: Different prompts for different authors
          # direct_prompt: |
          #   ${{ github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' &&
          #   'Welcome! Please review this PR from a first-time contributor. Be encouraging and provide detailed explanations for any suggestions.' ||
          #   'Please provide a thorough code review focusing on our coding standards and best practices.' }}
          #   ${{
          #   (github.event.pull_request.additions > 500) &&
          #   'Large PR detected. Focus only on architectural issues and breaking changes. Skip minor issues.' ||
          #   contains(github.event.pull_request.title, 'fix') &&
          #   'Bug fix PR: Verify the fix addresses the root cause and check for regression risks.' ||
          #   contains(github.event.pull_request.title, 'deps') &&
          #   'Dependency update: Check for breaking changes and security advisories only.' ||
          #   contains(github.event.pull_request.title, 'refactor') &&
          #   'Refactor PR: Verify no behavior changes and check for performance regressions.' ||
          #   contains(github.event.pull_request.title, 'feat') &&
          #   'New feature: Check for security issues, edge cases, and integration problems only.' ||
          #   'Standard review: Check for critical bugs, security issues, and breaking changes only.'
          #   }}

          # Optional: Add specific tools for running tests or linting
          # allowed_tools: "Bash(npm run test),Bash(npm run lint),Bash(npm run typecheck)"
2  .github/workflows/claude.yml  (vendored)

@@ -26,7 +26,7 @@ jobs:
      actions: read # Required for Claude to read CI results on PRs
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        uses: actions/checkout@v5
        with:
          fetch-depth: 1
2  .github/workflows/codeql-analysis.yml  (vendored)

@@ -24,7 +24,7 @@ jobs:

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        uses: actions/checkout@v5

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
31  .github/workflows/create-docusaurus-pr.yml  (vendored)

@@ -20,26 +20,49 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout source repository
        uses: actions/checkout@v4
        uses: actions/checkout@v5
        with:
          path: source-repo

      - name: Checkout docs repository
        uses: actions/checkout@v4
        uses: actions/checkout@v5
        with:
          repository: unraid/docs
          path: docs-repo
          token: ${{ secrets.DOCS_PAT_UNRAID_BOT }}

      - name: Copy updated docs
      - name: Copy and process docs
        run: |
          if [ ! -d "source-repo/api/docs" ]; then
            echo "Source directory does not exist!"
            exit 1
          fi

          # Remove old API docs but preserve other folders
          rm -rf docs-repo/docs/API/
          mkdir -p docs-repo/docs/API

          # Copy all markdown files and maintain directory structure
          cp -r source-repo/api/docs/public/. docs-repo/docs/API/

          # Copy images to Docusaurus static directory
          mkdir -p docs-repo/static/img/api

          # Copy images from public/images if they exist
          if [ -d "source-repo/api/docs/public/images" ]; then
            cp -r source-repo/api/docs/public/images/. docs-repo/static/img/api/
          fi

          # Also copy any images from the parent docs/images directory
          if [ -d "source-repo/api/docs/images" ]; then
            cp -r source-repo/api/docs/images/. docs-repo/static/img/api/
          fi

          # Update image paths in markdown files
          # Replace relative image paths with absolute paths pointing to /img/api/
          find docs-repo/docs/API -name "*.md" -type f -exec sed -i 's|!\[\([^]]*\)\](\./images/\([^)]*\))||g' {} \;
          find docs-repo/docs/API -name "*.md" -type f -exec sed -i 's|!\[\([^]]*\)\](images/\([^)]*\))||g' {} \;
          find docs-repo/docs/API -name "*.md" -type f -exec sed -i 's|!\[\([^]]*\)\](../images/\([^)]*\))||g' {} \;
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v7
        with:

@@ -53,7 +76,7 @@ jobs:
            Changes were automatically generated from api/docs/* directory.

            @coderabbitai ignore
          reviewers: ljm42, elibosley, pujitm, mdatelle
          reviewers: ljm42, elibosley
          branch: update-api-docs
          base: main
          delete-branch: true
6  .github/workflows/deploy-storybook.yml  (vendored)

@@ -20,12 +20,12 @@ jobs:
    name: Deploy Storybook
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        uses: actions/checkout@v5

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '22.17.1'
          node-version: '22.18.0'

      - uses: pnpm/action-setup@v4
        name: Install pnpm

@@ -33,7 +33,7 @@ jobs:
          run_install: false

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.1
        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
          version: 1.0
16  .github/workflows/main.yml  (vendored)

@@ -19,7 +19,7 @@ jobs:
      pull-requests: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        uses: actions/checkout@v5
        # Only run release-please on pushes to main
        if: github.event_name == 'push' && github.ref == 'refs/heads/main'

@@ -37,7 +37,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
        uses: actions/checkout@v5

      - name: Install Node
        uses: actions/setup-node@v4

@@ -45,7 +45,7 @@ jobs:
          node-version-file: ".nvmrc"

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.1
        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
          version: 1.0

@@ -163,7 +163,7 @@ jobs:
        working-directory: api
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
        uses: actions/checkout@v5

      - name: Install Node
        uses: actions/setup-node@v4

@@ -190,7 +190,7 @@ jobs:
            ${{ runner.os }}-pnpm-store-

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.1
        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0

@@ -240,7 +240,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
        uses: actions/checkout@v5

      - name: Install Node
        uses: actions/setup-node@v4

@@ -267,7 +267,7 @@ jobs:
            ${{ runner.os }}-pnpm-store-

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.1
        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0

@@ -298,7 +298,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
        uses: actions/checkout@v5

      - name: Create env file
        run: |
2  .github/workflows/release-production.yml  (vendored)

@@ -30,7 +30,7 @@ jobs:
          prerelease: false
      - uses: actions/setup-node@v4
        with:
          node-version: '22.17.1'
          node-version: '22.18.0'
      - run: |
          cat << 'EOF' > release-notes.txt
          ${{ steps.release-info.outputs.body }}
8  .github/workflows/test-libvirt.yml  (vendored)

@@ -22,16 +22,16 @@ jobs:
        working-directory: ./libvirt

    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v5
        with:
          submodules: recursive

      - uses: actions/setup-python@v5
        with:
          python-version: "3.13.5"
          python-version: "3.13.6"

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.1
        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: libvirt-dev
          version: 1.0

@@ -44,7 +44,7 @@ jobs:
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
          version: 10.14.0
          run_install: false

      - name: Get pnpm store directory
6  .gitignore  (vendored)

@@ -76,6 +76,9 @@ typescript
# Github actions
RELEASE_NOTES.md

# Test backups
api/dev/configs/api.json.backup

# Docker Deploy Folder
deploy/*
!deploy/.gitkeep

@@ -112,3 +115,6 @@ api/dev/Unraid.net/myservers.cfg

# Claude local settings
.claude/settings.local.json

# local Mise settings
.mise.toml

@@ -1 +1 @@
{".":"4.11.0"}
{".":"4.15.0"}
@@ -120,6 +120,13 @@ Enables GraphQL playground at `http://tower.local/graphql`

### Testing Guidelines

#### General Testing Best Practices

- **Error Testing:** Use `.rejects.toThrow()` without arguments to test that functions throw errors. Don't test exact error message strings unless the message format is specifically what you're testing
- **Focus on Behavior:** Test what the code does, not implementation details like exact error message wording
- **Avoid Brittleness:** Don't write tests that break when minor changes are made to error messages, log formats, or other non-essential details
- **Use Mocks Correctly**: Mocks should be used as nouns, not verbs.

#### Vue Component Testing

- This is a Nuxt.js app but we are testing with vitest outside of the Nuxt environment
10  api/.depcheckrc  (new file)

@@ -0,0 +1,10 @@
{
  "parsers": {
    "**/*.ts": [
      "@depcheck/parser-typescript",
      {
        "project": "tsconfig.json"
      }
    ]
  }
}
@@ -17,6 +17,7 @@ PATHS_RCLONE_SOCKET=./dev/rclone-socket
PATHS_LOG_BASE=./dev/log # Where we store logs
PATHS_LOGS_FILE=./dev/log/graphql-api.log
PATHS_CONNECT_STATUS_FILE_PATH=./dev/connectStatus.json # Connect plugin status file
PATHS_OIDC_JSON=./dev/configs/oidc.local.json
ENVIRONMENT="development"
NODE_ENV="development"
PORT="3001"
9  api/.gitignore  (vendored)

@@ -82,3 +82,12 @@ deploy/*
.idea

!**/*.login.*

# local api configs - don't need project-wide tracking
dev/connectStatus.json
dev/configs/*
# local status - doesn't need to be tracked
dev/connectStatus.json

# local OIDC config for testing - contains secrets
dev/configs/oidc.local.json
@@ -1,5 +1,72 @@
# Changelog

## [4.15.0](https://github.com/unraid/api/compare/v4.14.0...v4.15.0) (2025-08-20)

### Features

* **api:** restructure versioning information in GraphQL schema ([#1600](https://github.com/unraid/api/issues/1600)) ([d0c6602](https://github.com/unraid/api/commit/d0c66020e1d1d5b6fcbc4ee8979bba4b3d34c7ad))

## [4.14.0](https://github.com/unraid/api/compare/v4.13.1...v4.14.0) (2025-08-19)

### Features

* **api:** add cpu utilization query and subscription ([#1590](https://github.com/unraid/api/issues/1590)) ([2b4c2a2](https://github.com/unraid/api/commit/2b4c2a264bb2769f88c3000d16447889cae57e98))
* enhance OIDC claim evaluation with array handling ([#1596](https://github.com/unraid/api/issues/1596)) ([b7798b8](https://github.com/unraid/api/commit/b7798b82f44aae9a428261270fd9dbde35ff7751))

### Bug Fixes

* remove unraid-api sso users & always apply sso modification on < 7.2 ([#1595](https://github.com/unraid/api/issues/1595)) ([4262830](https://github.com/unraid/api/commit/426283011afd41e3af7e48cfbb2a2d351c014bd1))
* update Docusaurus PR workflow to process and copy API docs ([3a10871](https://github.com/unraid/api/commit/3a10871918fe392a1974b69d16a135546166e058))
* update OIDC provider setup documentation for navigation clarity ([1a01696](https://github.com/unraid/api/commit/1a01696dc7b947abf5f2f097de1b231d5593c2ff))
* update OIDC provider setup documentation for redirect URI and screenshots ([1bc5251](https://github.com/unraid/api/commit/1bc52513109436b3ce8237c3796af765e208f9fc))

## [4.13.1](https://github.com/unraid/api/compare/v4.13.0...v4.13.1) (2025-08-15)

### Bug Fixes

* insecure routes not working for SSO ([#1587](https://github.com/unraid/api/issues/1587)) ([a4ff3c4](https://github.com/unraid/api/commit/a4ff3c40926915f6989ed4af679b30cf295ea15d))

## [4.13.0](https://github.com/unraid/api/compare/v4.12.0...v4.13.0) (2025-08-15)

### Features

* `createDockerFolder` & `setDockerFolderChildren` mutations ([#1558](https://github.com/unraid/api/issues/1558)) ([557b03f](https://github.com/unraid/api/commit/557b03f8829d3f179b5e26162fa250121cb33420))
* `deleteDockerEntries` mutation ([#1564](https://github.com/unraid/api/issues/1564)) ([78997a0](https://github.com/unraid/api/commit/78997a02c6d96ec0ed75352dfc9849524147428c))
* add `moveDockerEntriesToFolder` mutation ([#1569](https://github.com/unraid/api/issues/1569)) ([20c2d5b](https://github.com/unraid/api/commit/20c2d5b4457ad50d1e287fb3141aa98e8e7de665))
* add docker -> organizer query ([#1555](https://github.com/unraid/api/issues/1555)) ([dfe352d](https://github.com/unraid/api/commit/dfe352dfa1bd6aa059cab56357ba6bff5e8ed7cb))
* connect settings page updated for responsive webgui ([#1585](https://github.com/unraid/api/issues/1585)) ([96c120f](https://github.com/unraid/api/commit/96c120f9b24d3c91df5e9401917c8994eef36c46))
* implement OIDC provider management in GraphQL API ([#1563](https://github.com/unraid/api/issues/1563)) ([979a267](https://github.com/unraid/api/commit/979a267bc5e128a8b789f0123e23c61860ebb11b))

### Bug Fixes

* change config file loading error log to debug ([#1565](https://github.com/unraid/api/issues/1565)) ([3534d6f](https://github.com/unraid/api/commit/3534d6fdd7c59e65615167cfe306deebad9ca4d3))
* **connect:** remove unraid-api folder before creating symlink ([#1556](https://github.com/unraid/api/issues/1556)) ([514a0ef](https://github.com/unraid/api/commit/514a0ef560a90595f774b6c0db60f1d2b4cd853c))
* **deps:** pin dependencies ([#1586](https://github.com/unraid/api/issues/1586)) ([5721785](https://github.com/unraid/api/commit/57217852a337ead4c8c8e7596d1b7d590b64a26f))
* **deps:** update all non-major dependencies ([#1543](https://github.com/unraid/api/issues/1543)) ([18b5209](https://github.com/unraid/api/commit/18b52090874c0ba86878d0f7e31bf0dc42734d75))
* **deps:** update all non-major dependencies ([#1579](https://github.com/unraid/api/issues/1579)) ([ad6aa3b](https://github.com/unraid/api/commit/ad6aa3b6743aeeb42eff34d1c89ad874dfd0af09))
* refactor API client to support Unix socket connections ([#1575](https://github.com/unraid/api/issues/1575)) ([a2c5d24](https://github.com/unraid/api/commit/a2c5d2495ffc02efa1ec5c63f0a1c5d23c9ed7ff))
* **theme:** API key white text on white background ([#1584](https://github.com/unraid/api/issues/1584)) ([b321687](https://github.com/unraid/api/commit/b3216874faae208cdfc3edec719629fce428b6a3))

## [4.12.0](https://github.com/unraid/api/compare/v4.11.0...v4.12.0) (2025-07-30)

### Features

* add ups monitoring to graphql api ([#1526](https://github.com/unraid/api/issues/1526)) ([6ea94f0](https://github.com/unraid/api/commit/6ea94f061d5b2e6c6fbfa6949006960501e3f4e7))

### Bug Fixes

* enhance plugin management with interactive removal prompts ([#1549](https://github.com/unraid/api/issues/1549)) ([23ef760](https://github.com/unraid/api/commit/23ef760d763c525a38108048200fa73fc8531aed))
* remove connect api plugin upon removal of Connect Unraid plugin ([#1548](https://github.com/unraid/api/issues/1548)) ([782d5eb](https://github.com/unraid/api/commit/782d5ebadc67854298f3b2355255983024d2a225))
* SSO not being detected ([#1546](https://github.com/unraid/api/issues/1546)) ([6b3b951](https://github.com/unraid/api/commit/6b3b951d8288cd31d096252be544537dc2bfce50))

## [4.11.0](https://github.com/unraid/api/compare/v4.10.0...v4.11.0) (2025-07-28)
@@ -1,7 +1,7 @@
###########################################################
# Development/Build Image
###########################################################
FROM node:22.17.1-bookworm-slim AS development
FROM node:22.18.0-bookworm-slim AS development

# Install build tools and dependencies
RUN apt-get update -y && apt-get install -y \
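Since the hunk above only shows the top of the Dockerfile, here is a hedged sketch of building just that `development` stage; the image tag is arbitrary and the command assumes it is run from the directory containing this Dockerfile:

```bash
# Build only the "development" stage declared above (sketch; tag name is illustrative).
docker build --target development -t unraid-api:dev .
```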
34  api/dev/configs/README.md  (new file)

@@ -0,0 +1,34 @@
# Development Configuration Files

This directory contains configuration files for local development.

## OIDC Configuration

### oidc.json
The default OIDC configuration file. This file is committed to git and should only contain non-sensitive test configurations.

### Using a Local Configuration (gitignored)
For local testing with real OAuth providers:

1. Create an `oidc.local.json` file based on `oidc.json`
2. Set the environment variable: `PATHS_OIDC_JSON=./dev/configs/oidc.local.json`
3. The API will load your local configuration instead of the default

Example:
```bash
PATHS_OIDC_JSON=./dev/configs/oidc.local.json pnpm dev
```

### Setting up OAuth Apps

#### Google
1. Go to [Google Cloud Console](https://console.cloud.google.com/)
2. Create a new project or select existing
3. Enable Google+ API
4. Create OAuth 2.0 credentials
5. Add authorized redirect URI: `http://localhost:3000/graphql/api/auth/oidc/callback`

#### GitHub
1. Go to GitHub Settings > Developer settings > OAuth Apps
2. Create a new OAuth App
3. Set Authorization callback URL: `http://localhost:3000/graphql/api/auth/oidc/callback`
@@ -1,7 +1,9 @@
{
  "version": "4.11.0",
  "version": "4.14.0",
  "extraOrigins": [],
  "sandbox": false,
  "sandbox": true,
  "ssoSubIds": [],
  "plugins": []
  "plugins": [
    "unraid-api-plugin-connect"
  ]
}

@@ -2,11 +2,11 @@
  "wanaccess": true,
  "wanport": 8443,
  "upnpEnabled": false,
  "apikey": "_______________________BIG_API_KEY_HERE_________________________",
  "apikey": "",
  "localApiKey": "_______________________LOCAL_API_KEY_HERE_________________________",
  "email": "test@example.com",
  "username": "zspearmint",
  "avatar": "https://via.placeholder.com/200",
  "regWizTime": "1611175408732_0951-1653-3509-FBA155FA23C0",
  "dynamicRemoteAccessType": "DISABLED"
  "dynamicRemoteAccessType": "STATIC"
}
21  api/dev/configs/oidc.json  (new file)

@@ -0,0 +1,21 @@
{
  "providers": [
    {
      "id": "unraid.net",
      "name": "Unraid.net",
      "clientId": "CONNECT_SERVER_SSO",
      "issuer": "https://account.unraid.net",
      "authorizationEndpoint": "https://account.unraid.net/sso/",
      "tokenEndpoint": "https://account.unraid.net/api/oauth2/token",
      "scopes": [
        "openid",
        "profile",
        "email"
      ],
      "authorizedSubIds": [
        "297294e2-b31c-4bcc-a441-88aee0ad609f"
      ],
      "buttonText": "Login With Unraid.net"
    }
  ]
}
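For reference, a gitignored `oidc.local.json` for the Google setup described in the dev-configs README above might look like the following sketch. The field names mirror the committed `oidc.json`; the `clientSecret` key is an assumption (the committed file does not include one), and the ID/secret values are placeholders:

```json
{
  "providers": [
    {
      "id": "google",
      "name": "Google",
      "clientId": "YOUR_GOOGLE_CLIENT_ID.apps.googleusercontent.com",
      "clientSecret": "YOUR_GOOGLE_CLIENT_SECRET",
      "issuer": "https://accounts.google.com",
      "scopes": ["openid", "profile", "email"],
      "buttonText": "Sign in with Google"
    }
  ]
}
```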
@@ -1,5 +1,17 @@
---
title: CLI Reference
description: Complete reference for all Unraid API CLI commands
sidebar_position: 4
---

# CLI Commands

:::info[Command Structure]
All commands follow the pattern: `unraid-api <command> [options]`
:::

## 🚀 Service Management

### Start

```bash

@@ -39,7 +51,7 @@ View the API logs.

- `-l, --lines`: Optional. Number of lines to tail (default: 100)

## Configuration Commands
## ⚙️ Configuration Commands

### Config

@@ -61,6 +73,10 @@ Switch between production and staging environments.

### Developer Mode

:::tip Web GUI Management
You can also manage developer options through the web interface at **Settings** → **Management Access** → **Developer Options**
:::

```bash
unraid-api developer # Interactive prompt for tools
unraid-api developer --sandbox true # Enable GraphQL sandbox

@@ -76,13 +92,17 @@ Configure developer features for the API:

## API Key Management

:::tip Web GUI Management
You can also manage API keys through the web interface at **Settings** → **Management Access** → **API Keys**
:::

### API Key Commands

```bash
unraid-api apikey [options]
```

Create and manage API keys.
Create and manage API keys via CLI.

Options:

@@ -94,6 +114,10 @@ Options:

## SSO (Single Sign-On) Management

:::info OIDC Configuration
For OIDC/SSO provider configuration, see the web interface at **Settings** → **Management Access** → **API** → **OIDC** or refer to the [OIDC Provider Setup](./oidc-provider-setup.md) guide.
:::

### SSO Base Command

```bash
@@ -1,39 +1,75 @@
---
title: Using the Unraid API
description: Learn how to interact with your Unraid server through the GraphQL API
sidebar_position: 2
---

# Using the Unraid API

:::tip[Quick Start]
The Unraid API provides a powerful GraphQL interface for managing your server. This guide covers authentication, common queries, and best practices.
:::

The Unraid API provides a GraphQL interface that allows you to interact with your Unraid server. This guide will help you get started with exploring and using the API.

## Enabling the GraphQL Sandbox
## 🎮 Enabling the GraphQL Sandbox

1. Enable developer mode using the CLI:
### Web GUI Method (Recommended)

```bash
unraid-api developer --sandbox true
```

Or use the interactive mode:

```bash
unraid-api developer
```

2. Once enabled, you can access the Apollo Sandbox interface
:::info[Preferred Method]
Using the Web GUI is the easiest way to enable the GraphQL sandbox.
:::

1. Navigate to **Settings** → **Management Access** → **Developer Options**
2. Enable the **GraphQL Sandbox** toggle
3. Access the GraphQL playground by navigating to:

```txt
http://YOUR_SERVER_IP/graphql
```

## Authentication
### CLI Method

Most queries and mutations require authentication. You can authenticate using either:
Alternatively, you can enable developer mode using the CLI:

1. API Keys
2. Cookies (default method when signed into the WebGUI)
```bash
unraid-api developer --sandbox true
```

### Creating an API Key
Or use the interactive mode:

Use the CLI to create an API key:
```bash
unraid-api developer
```

## 🔑 Authentication

:::warning[Required for Most Operations]
Most queries and mutations require authentication. Always include appropriate credentials in your requests.
:::

You can authenticate using:

1. **API Keys** - For programmatic access
2. **Cookies** - Automatic when signed into the WebGUI
3. **SSO/OIDC** - When configured with external providers

### Managing API Keys

<tabs>
<tabItem value="gui" label="Web GUI (Recommended)" default>

Navigate to **Settings** → **Management Access** → **API Keys** in your Unraid web interface to:

- View existing API keys
- Create new API keys
- Manage permissions and roles
- Revoke or regenerate keys

</tabItem>
<tabItem value="cli" label="CLI Method">

You can also use the CLI to create an API key:

```bash
unraid-api apikey --create

@@ -46,6 +82,11 @@ Follow the prompts to set:
- Roles
- Permissions

</tabItem>
</tabs>

### Using API Keys

The generated API key should be included in your GraphQL requests as a header:

```json

@@ -54,7 +95,7 @@ The generated API key should be included in your GraphQL requests as a header:
}
```
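As a concrete sketch of the request shape, here is a curl call that sends a query with an API key header. The header name `x-api-key` is an assumption (the header JSON in the snippet above is elided by the hunk boundary), so verify the exact name shown by the WebGUI or sandbox if the request is rejected:

```bash
# Hedged example: replace YOUR_SERVER_IP and YOUR_API_KEY with real values.
curl -s http://YOUR_SERVER_IP/graphql \
  -H "Content-Type: application/json" \
  -H "x-api-key: YOUR_API_KEY" \
  -d '{"query": "{ __typename }"}'
```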
## Available Schemas
## 📊 Available Schemas

The API provides access to various aspects of your Unraid server:

@@ -83,9 +124,9 @@ The API provides access to various aspects of your Unraid server:
- Handle SSO configuration
- Manage allowed origins

### Example Queries
### 💻 Example Queries

1. Check System Status:
#### Check System Status

```graphql
query {

@@ -106,7 +147,7 @@ query {
}
```

2. Monitor Array Status:
#### Monitor Array Status

```graphql
query {

@@ -129,7 +170,7 @@ query {
}
```

3. List Docker Containers:
#### List Docker Containers

```graphql
query {

@@ -143,7 +184,7 @@ query {
}
```

## Schema Types
## 🏗️ Schema Types

The API includes several core types:

@@ -170,19 +211,23 @@ Available roles:
- `connect`: Remote access features
- `guest`: Limited read access

## Best Practices
## ✨ Best Practices

:::tip[Pro Tips]
1. Use the Apollo Sandbox to explore the schema and test queries
2. Start with small queries and gradually add fields as needed
3. Monitor your query complexity to maintain performance
4. Use appropriate roles and permissions for your API keys
5. Keep your API keys secure and rotate them periodically
:::

## Rate Limiting
## ⏱️ Rate Limiting

:::caution[Rate Limits]
The API implements rate limiting to prevent abuse. Ensure your applications handle rate limit responses appropriately.
:::

## Error Handling
## 🚨 Error Handling

The API returns standard GraphQL errors in the following format:

@@ -198,11 +243,13 @@ The API returns standard GraphQL errors in the following format:
}
```
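The fields elided by the hunk above follow the standard GraphQL error shape; as a generic illustration only (the values are placeholders, not actual Unraid API output):

```json
{
  "errors": [
    {
      "message": "Unauthorized",
      "locations": [{ "line": 2, "column": 3 }],
      "path": ["docker", "containers"],
      "extensions": { "code": "FORBIDDEN" }
    }
  ]
}
```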
## Additional Resources
## 📚 Additional Resources

:::info[Learn More]
- Use the Apollo Sandbox's schema explorer to browse all available types and fields
- Check the documentation tab in Apollo Sandbox for detailed field descriptions
- Monitor the API's health using `unraid-api status`
- Generate reports using `unraid-api report` for troubleshooting

For more information about specific commands and configuration options, refer to the CLI documentation or run `unraid-api --help`.
For more information about specific commands and configuration options, refer to the [CLI documentation](/cli) or run `unraid-api --help`.
:::
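A schema-agnostic way to start the exploration suggested above is a small introspection query, which any GraphQL endpoint (including this one, once authenticated) will answer:

```graphql
query AvailableQueries {
  __schema {
    queryType {
      fields {
        name
        description
      }
    }
  }
}
```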
BIN  api/docs/public/images/advanced-rules.png           (new file, binary not shown, 101 KiB)
BIN  api/docs/public/images/button-customization.png     (new file, binary not shown, 96 KiB)
BIN  api/docs/public/images/configured-provider.png      (new file, binary not shown, 85 KiB)
BIN  api/docs/public/images/default-unraid-provider.png  (new file, binary not shown, 128 KiB)
BIN  api/docs/public/images/sso-with-options.png         (new file, binary not shown, 75 KiB)
@@ -1,37 +1,94 @@
# Unraid API
---
title: Welcome to Unraid API
description: The official GraphQL API for Unraid Server management and automation
sidebar_position: 1
---

# Welcome to Unraid API

:::tip[What's New]
Starting with Unraid OS v7.2, the API comes built into the operating system - no plugin installation required!
:::

The Unraid API provides a GraphQL interface for programmatic interaction with your Unraid server. It enables automation, monitoring, and integration capabilities.

## Current Availability
## 📦 Availability

The API is available through the Unraid Connect Plugin:
### ✨ Native Integration (Unraid OS v7.2+)

1. Install Unraid Connect Plugin from Apps
Starting with Unraid OS v7.2, the API is integrated directly into the operating system:

- No plugin installation required
- Automatically available on system startup
- Deep system integration
- Access through **Settings** → **Management Access** → **API**

### 🔌 Plugin Installation (Pre-7.2 and Advanced Users)

For Unraid versions prior to v7.2 or to access newer API features:

1. Install the Unraid Connect Plugin from Community Apps
2. [Configure the plugin](./how-to-use-the-api.md#enabling-the-graphql-sandbox)
3. Access API functionality through the [GraphQL Sandbox](./how-to-use-the-api.md#accessing-the-graphql-sandbox)
3. Access API functionality through the [GraphQL Sandbox](./how-to-use-the-api.md)

## Future Availability
:::info Important Notes
- The Unraid Connect plugin provides the API for pre-7.2 versions
- You do NOT need to sign in to Unraid Connect to use the API locally
- Installing the plugin on 7.2+ gives you access to newer API features before they're included in OS releases
:::

The API will be integrated directly into the Unraid operating system in an upcoming OS release. This integration will:
## 📚 Documentation Sections

- Make the API a core part of the Unraid system
- Remove the need for separate plugin installation
- Enable deeper system integration capabilities
<cards>
<card title="CLI Commands" icon="terminal" href="./cli">
Complete reference for all CLI commands
</card>
<card title="Using the API" icon="code" href="./how-to-use-the-api">
Learn how to interact with the GraphQL API
</card>
<card title="OIDC Setup" icon="shield" href="./oidc-provider-setup">
Configure SSO authentication providers
</card>
<card title="Upcoming Features" icon="rocket" href="./upcoming-features">
See what's coming next
</card>
</cards>

## Documentation Sections

- [CLI Commands](./cli.md) - Reference for all available command-line interface commands
- [Using the Unraid API](./how-to-use-the-api.md) - Comprehensive guide on using the GraphQL API
- [Upcoming Features](./upcoming-features.md) - Roadmap of planned features and improvements

## Key Features
## 🌟 Key Features

:::info[Core Capabilities]
The API provides:

- GraphQL Interface: Modern, flexible API with strong typing
- Authentication: Secure access via API keys or session cookies
- Comprehensive Coverage: Access to system information, array management, and Docker operations
- Developer Tools: Built-in GraphQL sandbox for testing
- Role-Based Access: Granular permission control
- **GraphQL Interface**: Modern, flexible API with strong typing
- **Authentication**: Multiple methods including API keys, session cookies, and SSO/OIDC
- **Comprehensive Coverage**: Access to system information, array management, and Docker operations
- **Developer Tools**: Built-in GraphQL sandbox configurable via web interface or CLI
- **Role-Based Access**: Granular permission control
- **Web Management**: Manage API keys and settings through the web interface
:::

For detailed usage instructions, see [CLI Commands](./cli.md).
## 🚀 Get Started

<tabs>
<tabItem value="v72" label="Unraid OS v7.2+" default>

1. The API is already installed and running
2. Access settings at **Settings** → **Management Access** → **API**
3. Enable the GraphQL Sandbox for development
4. Create your first API key
5. Start making GraphQL queries!

</tabItem>
<tabItem value="older" label="Pre-7.2 Versions">

1. Install the Unraid Connect plugin from Community Apps
2. No Unraid Connect login required for local API access
3. Configure the plugin settings
4. Enable the GraphQL Sandbox
5. Start exploring the API!

</tabItem>
</tabs>

For detailed usage instructions, see the [CLI Commands](./cli) reference.
420  api/docs/public/oidc-provider-setup.md  (new file)

@@ -0,0 +1,420 @@
---
title: OIDC Provider Setup
description: Configure OIDC (OpenID Connect) providers for SSO authentication in Unraid API
sidebar_position: 3
---

# OIDC Provider Setup

:::info[What is OIDC?]
OpenID Connect (OIDC) is an authentication protocol that allows users to sign in using their existing accounts from providers like Google, Microsoft, or your corporate identity provider. It enables Single Sign-On (SSO) for seamless and secure authentication.
:::

This guide walks you through configuring OIDC (OpenID Connect) providers for SSO authentication in the Unraid API using the web interface.

## 🚀 Quick Start

<details open>
<summary><strong>Getting to OIDC Settings</strong></summary>

1. Navigate to your Unraid server's web interface
2. Go to **Settings** → **Management Access** → **API** → **OIDC**
3. You'll see tabs for different providers - click the **+** button to add a new provider

</details>

### OIDC Providers Interface Overview


*Login page showing traditional login form with SSO options - "Login With Unraid.net" and "Sign in with Google" buttons*

The interface includes:

- **Provider tabs**: Each configured provider (Unraid.net, Google, etc.) appears as a tab
- **Add Provider button**: Click the **+** button to add new providers
- **Authorization Mode dropdown**: Toggle between "simple" and "advanced" modes
- **Simple Authorization section**: Configure allowed email domains and specific addresses
- **Add Item buttons**: Click to add multiple authorization rules

## Understanding Authorization Modes

The interface provides two authorization modes:

### Simple Mode (Recommended)

Simple mode is the easiest way to configure authorization. You can:

- Allow specific email domains (e.g., @company.com)
- Allow specific email addresses
- Configure who can access your Unraid server with minimal setup

**When to use Simple Mode:**

- You want to allow all users from your company domain
- You have a small list of specific users
- You're new to OIDC configuration

<details>
<summary><strong>Advanced Mode</strong></summary>

Advanced mode provides granular control using claim-based rules. You can:

- Create complex authorization rules based on JWT claims
- Use operators like equals, contains, endsWith, startsWith
- Combine multiple conditions with OR/AND logic
- Choose whether ANY rule must pass (OR mode) or ALL rules must pass (AND mode)

**When to use Advanced Mode:**

- You need to check group memberships
- You want to verify multiple claims (e.g., email domain AND verified status)
- You have complex authorization requirements
- You need fine-grained control over how rules are evaluated

</details>

## Authorization Rules


*Advanced authorization rules showing JWT claim configuration with email endsWith operator for domain-based access control*

### Simple Mode Examples

#### Allow Company Domain

In Simple Authorization:

- **Allowed Email Domains**: Enter `company.com`
- This allows anyone with @company.com email

#### Allow Specific Users

- **Specific Email Addresses**: Add individual emails
- Click **Add Item** to add multiple addresses

<details>
<summary><strong>Advanced Mode Examples</strong></summary>

#### Authorization Rule Mode

When using multiple rules, you can choose how they're evaluated:

- **OR Mode** (default): User is authorized if ANY rule passes
- **AND Mode**: User is authorized only if ALL rules pass

#### Email Domain with Verification (AND Mode)

To require both email domain AND verification:

1. Set **Authorization Rule Mode** to `AND`
2. Add two rules:
   - Rule 1:
     - **Claim**: `email`
     - **Operator**: `endsWith`
     - **Value**: `@company.com`
   - Rule 2:
     - **Claim**: `email_verified`
     - **Operator**: `equals`
     - **Value**: `true`

This ensures users must have both a company email AND a verified email address.

#### Group-Based Access (OR Mode)

To allow access to multiple groups:

1. Set **Authorization Rule Mode** to `OR` (default)
2. Add rules for each group:
   - **Claim**: `groups`
   - **Operator**: `contains`
   - **Value**: `admins`

Or add another rule:
- **Claim**: `groups`
- **Operator**: `contains`
- **Value**: `developers`

Users in either `admins` OR `developers` group will be authorized.

#### Multiple Domains

- **Claim**: `email`
- **Operator**: `endsWith`
- **Values**: Add multiple domains (e.g., `company.com`, `subsidiary.com`)

#### Complex Authorization (AND Mode)

For strict security requiring multiple conditions (see the evaluation sketch after this block):

1. Set **Authorization Rule Mode** to `AND`
2. Add multiple rules that ALL must pass:
   - Email must be from company domain
   - Email must be verified
   - User must be in specific group
   - Account must have 2FA enabled (if claim available)

</details>
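To make the OR/AND semantics concrete, here is a minimal TypeScript sketch of how claim rules like the ones above could be evaluated. It is illustrative only, not the Unraid API's actual implementation; the type and function names are invented for the example:

```ts
type Operator = 'equals' | 'contains' | 'endsWith' | 'startsWith';

interface ClaimRule {
  claim: string;      // e.g. "email", "email_verified", "groups"
  operator: Operator;
  value: string;      // e.g. "@company.com", "true", "admins"
}

// Claims is the decoded JWT payload; values may be strings, booleans, or arrays.
type Claims = Record<string, unknown>;

function matches(rule: ClaimRule, claims: Claims): boolean {
  const raw = claims[rule.claim];
  // Normalize to an array of strings so array claims (like "groups") and scalars share one path.
  const values = (Array.isArray(raw) ? raw : [raw]).map((v) => String(v));
  switch (rule.operator) {
    case 'equals':
      return values.some((v) => v === rule.value);
    case 'contains':
      return values.some((v) => v.includes(rule.value));
    case 'endsWith':
      return values.some((v) => v.endsWith(rule.value));
    case 'startsWith':
      return values.some((v) => v.startsWith(rule.value));
  }
}

// "OR" mode: any rule passes; "AND" mode: all rules must pass.
function isAuthorized(rules: ClaimRule[], claims: Claims, mode: 'OR' | 'AND'): boolean {
  return mode === 'AND'
    ? rules.every((rule) => matches(rule, claims))
    : rules.some((rule) => matches(rule, claims));
}

// Example: the AND-mode "company email plus verified address" case described above.
const ok = isAuthorized(
  [
    { claim: 'email', operator: 'endsWith', value: '@company.com' },
    { claim: 'email_verified', operator: 'equals', value: 'true' },
  ],
  { email: 'dev@company.com', email_verified: true, groups: ['developers'] },
  'AND',
);
console.log(ok); // true
```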
<details>
<summary><strong>Configuration Interface Details</strong></summary>

### Provider Tabs

- Each configured provider appears as a tab at the top
- Click a tab to switch between provider configurations
- The **+** button on the right adds a new provider

### Authorization Mode Dropdown

- **simple**: Best for email-based authorization (recommended for most users)
- **advanced**: For complex claim-based rules using JWT claims

### Simple Authorization Fields

When "simple" mode is selected, you'll see:

- **Allowed Email Domains**: Enter domains without @ (e.g., `company.com`)
  - Helper text: "Users with emails ending in these domains can login"
- **Specific Email Addresses**: Add individual email addresses
  - Helper text: "Only these exact email addresses can login"
- **Add Item** buttons to add multiple entries

### Advanced Authorization Fields

When "advanced" mode is selected, you'll see:

- **Authorization Rule Mode**: Choose `OR` (any rule passes) or `AND` (all rules must pass)
- **Authorization Rules**: Add multiple claim-based rules
- **For each rule**:
  - **Claim**: The JWT claim to check
  - **Operator**: How to compare (equals, contains, endsWith, startsWith)
  - **Value**: What to match against

### Additional Interface Elements

- **Enable Developer Sandbox**: Toggle to enable GraphQL sandbox at `/graphql`
- The interface uses a dark theme for better visibility
- Field validation indicators help ensure correct configuration

</details>

### Required Redirect URI

:::caution[Important Configuration]
All providers must be configured with this exact redirect URI format:
:::

```bash
http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback
```

:::tip
Replace `YOUR_UNRAID_IP` with your actual server IP address (e.g., `192.168.1.100` or `tower.local`).
:::

### Issuer URL Format

The **Issuer URL** field accepts both formats, but **base URL is strongly recommended** for security:

- **Base URL** (recommended): `https://accounts.google.com`
- **Full discovery URL**: `https://accounts.google.com/.well-known/openid-configuration`

**⚠️ Security Note**: Always use the base URL format when possible. The system automatically appends `/.well-known/openid-configuration` for OIDC discovery. Using the full discovery URL directly disables important issuer validation checks and is not recommended by the OpenID Connect specification.

**Examples of correct base URLs:**
- Google: `https://accounts.google.com`
- Microsoft/Azure: `https://login.microsoftonline.com/YOUR_TENANT_ID/v2.0`
- Keycloak: `https://keycloak.example.com/realms/YOUR_REALM`
- Authelia: `https://auth.yourdomain.com`
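One quick way to sanity-check an issuer before saving it (and a useful first step for the "Provider not found" error below) is to fetch the discovery document yourself; this sketch assumes `curl` and `jq` are available on your workstation:

```bash
# The API appends this path automatically; fetching it manually confirms the issuer supports OIDC discovery.
curl -s https://accounts.google.com/.well-known/openid-configuration \
  | jq '{issuer, authorization_endpoint, token_endpoint}'
```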
## ✅ Testing Your Configuration
|
||||
|
||||

|
||||
*Unraid login page displaying both traditional username/password authentication and SSO options with customized provider buttons*
|
||||
|
||||
1. Save your provider configuration
|
||||
2. Log out (if logged in)
|
||||
3. Navigate to the login page
|
||||
4. Your configured provider button should appear
|
||||
5. Click to test the login flow
|
||||
|
||||
## 🔧 Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
#### "Provider not found" error
|
||||
|
||||
- Ensure the Issuer URL is correct
|
||||
- Check that the provider supports OIDC discovery (/.well-known/openid-configuration)
|
||||
|
||||
#### "Authorization failed"
|
||||
|
||||
- In Simple Mode: Check email domains are entered correctly (without @)
|
||||
- In Advanced Mode:
|
||||
- Verify claim names match exactly what your provider sends
|
||||
- Check if Authorization Rule Mode is set correctly (OR vs AND)
|
||||
- Ensure all required claims are present in the token
|
||||
- Enable debug logging to see actual claims and rule evaluation
|
||||
|
||||
#### "Invalid redirect URI"
|
||||
|
||||
- Ensure the redirect URI in your provider matches exactly
|
||||
- Include the correct port if using a non-standard configuration
|
||||
- Verify the redirect URI protocol matches your server's configuration (HTTP or HTTPS)
|
||||
|
||||
#### Cannot see login button
|
||||
|
||||
- Check that at least one authorization rule is configured
|
||||
- Verify the provider is enabled/saved
|
||||
|
||||
### Debug Mode
|
||||
|
||||
To troubleshoot issues:
|
||||
|
||||
1. Enable debug logging:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=debug unraid-api start --debug
|
||||
```
|
||||
|
||||
2. Check logs for:
|
||||
|
||||
- Received claims from provider
|
||||
- Authorization rule evaluation
|
||||
- Token validation errors
|
||||
|
||||
## 🔐 Security Best Practices
|
||||
|
||||
1. **Use Simple Mode for authorization** - Prevents overly accepting configurations and reduces misconfiguration risks
|
||||
2. **Be specific with authorization** - Don't use overly broad rules
|
||||
3. **Rotate secrets regularly** - Update client secrets periodically
|
||||
4. **Test thoroughly** - Verify only intended users can access
|
||||
|
||||
## 💡 Need Help?
|
||||
|
||||
- Check provider's OIDC documentation
|
||||
- Review Unraid API logs for detailed error messages
|
||||
- Ensure your provider supports standard OIDC discovery
|
||||
- Verify network connectivity between Unraid and provider
|
||||
|
||||
## 🏢 Provider-Specific Setup
|
||||
|
||||
### Unraid.net Provider
|
||||
|
||||
The Unraid.net provider is built-in and pre-configured. You only need to configure authorization rules in the interface.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: Pre-configured (built-in provider)
|
||||
- **Client ID/Secret**: Pre-configured (built-in provider)
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
:::tip[Redirect URI Protocol]
|
||||
**Match the protocol to your server setup:** Use `http://` if accessing your Unraid server without SSL/TLS (typical for local network access). Use `https://` if you've configured SSL/TLS on your server. Some OIDC providers (like Google) require HTTPS and won't accept HTTP redirect URIs.
|
||||
:::
|
||||
|
||||
Configure authorization rules using Simple Mode (allowed email domains/addresses) or Advanced Mode for complex requirements.
|
||||
|
||||
### Google
|
||||
|
||||
<details>
|
||||
<summary><strong>📋 Setup Steps</strong></summary>
|
||||
|
||||
Set up OAuth 2.0 credentials in [Google Cloud Console](https://console.cloud.google.com/):
|
||||
|
||||
1. Go to **APIs & Services** → **Credentials**
|
||||
2. Click **Create Credentials** → **OAuth client ID**
|
||||
3. Choose **Web application** as the application type
|
||||
4. Add your redirect URI to **Authorized redirect URIs**
|
||||
5. Configure the OAuth consent screen if prompted
|
||||
|
||||
</details>
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://accounts.google.com`
|
||||
- **Client ID/Secret**: From your OAuth 2.0 client credentials
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
:::warning[Google Domain Requirements]
|
||||
**Google requires valid domain names for OAuth redirect URIs.** Local IP addresses and `.local` domains are not accepted. To use Google OAuth with your Unraid server, you'll need:
|
||||
|
||||
- **Option 1: Reverse Proxy** - Set up a reverse proxy (like NGINX Proxy Manager or Traefik) with a valid domain name pointing to your Unraid API
|
||||
- **Option 2: Tailscale** - Use Tailscale to get a valid `*.ts.net` domain that Google will accept
|
||||
- **Option 3: Dynamic DNS** - Use a DDNS service to get a public domain name for your server
|
||||
|
||||
Remember to update your redirect URI in both Google Cloud Console and your Unraid OIDC configuration to use the valid domain.
|
||||
:::
|
||||
|
||||
For Google Workspace domains, use Advanced Mode with the `hd` claim to restrict access to your organization's domain.

### Authelia

Configure an OIDC client in your Authelia `configuration.yml` with client ID `unraid-api` and generate a hashed secret using the Authelia hash-password command.

**Configuration:**

- **Issuer URL**: `https://auth.yourdomain.com`
- **Client ID**: `unraid-api` (or as configured in Authelia)
- **Client Secret**: Your unhashed secret
- **Required Scopes**: `openid`, `profile`, `email`, `groups`
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`

Use Advanced Mode with the `groups` claim for group-based authorization.

### Microsoft/Azure AD

Register a new app in the [Azure Portal](https://portal.azure.com/) under Azure Active Directory → App registrations. Note the Application (client) ID, create a client secret, and note your tenant ID.

**Configuration:**

- **Issuer URL**: `https://login.microsoftonline.com/YOUR_TENANT_ID/v2.0`
- **Client ID**: Your Application (client) ID
- **Client Secret**: Generated client secret
- **Required Scopes**: `openid`, `profile`, `email`
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`

Authorization rules can be configured in the interface using email domains or advanced claims.

### Keycloak

Create a new confidential client in the Keycloak Admin Console with the `openid-connect` protocol and copy the client secret from the Credentials tab.

**Configuration:**

- **Issuer URL**: `https://keycloak.example.com/realms/YOUR_REALM`
- **Client ID**: `unraid-api` (or as configured in Keycloak)
- **Client Secret**: From the Keycloak Credentials tab
- **Required Scopes**: `openid`, `profile`, `email`
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`

For role-based authorization, use Advanced Mode with the `realm_access.roles` or `resource_access` claims.
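
For reference, Keycloak places realm roles under `realm_access.roles` and client-specific roles under `resource_access.<client>.roles`, so an advanced rule checks for the role you expect. A hedged sketch of that claim shape and check (the role name is a placeholder):

```typescript
// Illustrative subset of a Keycloak token payload.
interface KeycloakClaims {
  realm_access?: { roles: string[] };
  resource_access?: Record<string, { roles: string[] }>;
}

// Hypothetical check: accept a realm role, or the same role granted on the unraid-api client.
function hasUnraidRole(claims: KeycloakClaims, role = 'unraid-admin'): boolean {
  const realmRoles = claims.realm_access?.roles ?? [];
  const clientRoles = claims.resource_access?.['unraid-api']?.roles ?? [];
  return realmRoles.includes(role) || clientRoles.includes(role);
}

hasUnraidRole({ realm_access: { roles: ['unraid-admin', 'user'] } }); // true
hasUnraidRole({ resource_access: { 'unraid-api': { roles: ['viewer'] } } }); // false
```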

### Authentik

Create a new OAuth2/OpenID Provider in Authentik, then create an Application and link it to the provider.

**Configuration:**

- **Issuer URL**: `https://authentik.example.com/application/o/<application_slug>/`
- **Client ID**: From the Authentik provider configuration
- **Client Secret**: From the Authentik provider configuration
- **Required Scopes**: `openid`, `profile`, `email`
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`

Authorization rules can be configured in the interface.

### Okta

Create a new OIDC Web Application in the Okta Admin Console and assign the appropriate users or groups.

**Configuration:**

- **Issuer URL**: `https://YOUR_DOMAIN.okta.com`
- **Client ID**: From the Okta application configuration
- **Client Secret**: From the Okta application configuration
- **Required Scopes**: `openid`, `profile`, `email`
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`

Authorization rules can be configured in the interface using email domains or advanced claims.

@@ -1,71 +1,172 @@

# Upcoming Features
---
title: Roadmap & Features
description: Current status and upcoming features for the Unraid API
sidebar_position: 10
---

Note: This roadmap outlines planned features and improvements for the Unraid API. Features and timelines may change based on development priorities and community feedback.
# Roadmap & Features

:::info Development Status
This roadmap outlines completed and planned features for the Unraid API. Features and timelines may change based on development priorities and community feedback.
:::

## Feature Status Legend

| Status | Description |
|--------|-------------|
| ✅ **Done** | Feature is complete and available |
| 🚧 **In Progress** | Currently under active development |
| 📅 **Planned** | Scheduled for future development |
| 💡 **Under Consideration** | Being evaluated for future inclusion |

## Core Infrastructure

| Feature | Status | Tag |
|---------|--------|-----|
| API Development Environment Improvements | Done | v4.0.0 |
| Include API in Unraid OS | Planned (Q1 2025) | - |
| Make API Open Source | Planned (Q1 2025) | - |
| Separate API from Connect Plugin | Planned (Q2 2025) | - |
| Developer Tools for Plugins | Planned (Q2 2025) | - |
### Completed Features ✅

| Feature | Available Since |
|---------|-----------------|
| **API Development Environment Improvements** | v4.0.0 |
| **Include API in Unraid OS** | Unraid v7.2-beta.1 |
| **Separate API from Connect Plugin** | Unraid v7.2-beta.1 |

### Upcoming Features 📅

| Feature | Target Timeline |
|---------|-----------------|
| **Make API Open Source** | Q1 2025 |
| **Developer Tools for Plugins** | Q2 2025 |

## Security & Authentication

| Feature | Status | Tag |
|---------|--------|-----|
| Permissions System Rewrite | Done | v4.0.0 |
| User Interface Component Library | In Progress | - |
### Completed Features ✅

| Feature | Available Since |
|---------|-----------------|
| **Permissions System Rewrite** | v4.0.0 |
| **OIDC/SSO Support** | Unraid v7.2-beta.1 |

### In Development 🚧

- **User Interface Component Library** - Enhanced security components for the UI

## User Interface Improvements

| Feature | Status | Tag |
|---------|--------|-----|
| New Settings Pages | Planned (Q2 2025) | - |
| Custom Theme Creator | Planned (Q2-Q3 2025) | - |
| New Connect Settings Interface | Planned (Q1 2025) | - |
### Planned Features 📅

| Feature | Target Timeline | Description |
|---------|-----------------|-------------|
| **New Settings Pages** | Q2 2025 | Modernized settings interface with improved UX |
| **Custom Theme Creator** | Q2-Q3 2025 | Allow users to create and share custom themes |
| **New Connect Settings Interface** | Q1 2025 | Redesigned Unraid Connect configuration |

## Array Management

| Feature | Status | Tag |
|---------|--------|-----|
| Array Status Monitoring | Done | v4.0.0 |
| Storage Pool Creation Interface | Planned (Q2 2025) | - |
| Storage Pool Status Interface | Planned (Q2 2025) | - |
### Completed Features ✅

| Feature | Available Since |
|---------|-----------------|
| **Array Status Monitoring** | v4.0.0 |

### Planned Features 📅

| Feature | Target Timeline | Description |
|---------|-----------------|-------------|
| **Storage Pool Creation Interface** | Q2 2025 | Simplified pool creation workflow |
| **Storage Pool Status Interface** | Q2 2025 | Real-time pool health monitoring |

## Docker Integration

| Feature | Status | Tag |
|---------|--------|-----|
| Docker Container Status Monitoring | Done | v4.0.0 |
| New Docker Status Interface Design | Planned (Q3 2025) | - |
| New Docker Status Interface | Planned (Q3 2025) | - |
| Docker Container Setup Interface | Planned (Q3 2025) | - |
| Docker Compose Support | Planned | - |
### Completed Features ✅

| Feature | Available Since |
|---------|-----------------|
| **Docker Container Status Monitoring** | v4.0.0 |

### Planned Features 📅

| Feature | Target Timeline | Description |
|---------|-----------------|-------------|
| **New Docker Status Interface Design** | Q3 2025 | Modern container management UI |
| **New Docker Status Interface** | Q3 2025 | Implementation of new design |
| **Docker Container Setup Interface** | Q3 2025 | Streamlined container deployment |
| **Docker Compose Support** | TBD | Native docker-compose.yml support |

## Share Management

| Feature | Status | Tag |
|---------|--------|-----|
| Array/Cache Share Status Monitoring | Done | v4.0.0 |
| Storage Share Creation & Settings | Planned | - |
| Storage Share Management Interface | Planned | - |
### Completed Features ✅

| Feature | Available Since |
|---------|-----------------|
| **Array/Cache Share Status Monitoring** | v4.0.0 |

### Under Consideration 💡

- **Storage Share Creation & Settings** - Enhanced share configuration options
- **Storage Share Management Interface** - Unified share management dashboard

## Plugin System

| Feature | Status | Tag |
|---------|--------|-----|
| New Plugins Interface | Planned (Q3 2025) | - |
| Plugin Management Interface | Planned | - |
| Plugin Development Tools | Planned | - |
### Planned Features 📅

| Feature | Target Timeline | Description |
|---------|-----------------|-------------|
| **New Plugins Interface** | Q3 2025 | Redesigned plugin management UI |
| **Plugin Management Interface** | TBD | Advanced plugin configuration |
| **Plugin Development Tools** | TBD | SDK and tooling for developers |

## Notifications

| Feature | Status | Tag |
|---------|--------|-----|
| Notifications System | Done | v4.0.0 |
| Notifications Interface | Done | v4.0.0 |
### Completed Features ✅

Features marked as "Done" are available in current releases. The tag column shows the version where a feature was first introduced.
| Feature | Available Since |
|---------|-----------------|
| **Notifications System** | v4.0.0 |
| **Notifications Interface** | v4.0.0 |

---

## Recent Releases

:::info Full Release History
For a complete list of all releases, changelogs, and download links, visit the [Unraid API GitHub Releases](https://github.com/unraid/api/releases) page.
:::

### Unraid v7.2-beta.1 Highlights

- 🎉 **API included in Unraid OS** - Native integration
- 🔐 **OIDC/SSO Support** - Enterprise authentication
- 📦 **Standalone API** - Separated from Connect plugin

### v4.0.0 Highlights

- 🛡️ **Permissions System Rewrite** - Enhanced security
- 📊 **Comprehensive Monitoring** - Array, Docker, and Share status
- 🔔 **Notifications System** - Real-time alerts and notifications
- 🛠️ **Developer Environment** - Improved development tools

## Community Feedback

:::tip Have a Feature Request?
We value community input! Please submit feature requests and feedback through:

- [Unraid Forums](https://forums.unraid.net)
- [GitHub Issues](https://github.com/unraid/api/issues) - API is open source!

:::

## Version Support

| Unraid Version | API Version | Support Status |
|----------------|-------------|----------------|
| Unraid v7.2-beta.1+ | Latest | ✅ Active |
| 7.0 - 7.1.x | v4.x via Plugin | ⚠️ Limited |
| 6.12.x | v4.x via Plugin | ⚠️ Limited |
| < 6.12 | Not Supported | ❌ EOL |

:::warning Legacy Support
Versions prior to Unraid 7.2 require the API to be installed through the Unraid Connect plugin. Some features may not be available on older versions.
:::

:::tip Pre-release Versions
You can always install the Unraid Connect plugin to access pre-release versions of the API and get early access to new features before they're included in Unraid OS releases.
:::

File diff suppressed because it is too large
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@unraid/api",
|
||||
"version": "4.11.0",
|
||||
"version": "4.15.0",
|
||||
"main": "src/cli/index.ts",
|
||||
"type": "module",
|
||||
"corepack": {
|
||||
@@ -10,12 +10,12 @@
|
||||
"author": "Lime Technology, Inc. <unraid.net>",
|
||||
"license": "GPL-2.0-or-later",
|
||||
"engines": {
|
||||
"pnpm": "10.13.1"
|
||||
"pnpm": "10.14.0"
|
||||
},
|
||||
"scripts": {
|
||||
"// Development": "",
|
||||
"start": "node dist/main.js",
|
||||
"dev": "vite",
|
||||
"dev": "clear && vite",
|
||||
"dev:debug": "NODE_OPTIONS='--inspect-brk=9229 --enable-source-maps' vite",
|
||||
"command": "COMMAND_TESTER=true pnpm run build > /dev/null 2>&1 && NODE_ENV=development ./dist/cli.js",
|
||||
"command:raw": "./dist/cli.js",
|
||||
@@ -51,7 +51,7 @@
|
||||
"unraid-api": "dist/cli.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@apollo/client": "3.13.8",
|
||||
"@apollo/client": "3.13.9",
|
||||
"@apollo/server": "4.12.2",
|
||||
"@as-integrations/fastify": "2.1.1",
|
||||
"@fastify/cookie": "11.0.2",
|
||||
@@ -64,13 +64,13 @@
|
||||
"@jsonforms/core": "3.6.0",
|
||||
"@nestjs/apollo": "13.1.0",
|
||||
"@nestjs/cache-manager": "3.0.1",
|
||||
"@nestjs/common": "11.1.5",
|
||||
"@nestjs/common": "11.1.6",
|
||||
"@nestjs/config": "4.0.2",
|
||||
"@nestjs/core": "11.1.5",
|
||||
"@nestjs/core": "11.1.6",
|
||||
"@nestjs/event-emitter": "3.0.1",
|
||||
"@nestjs/graphql": "13.1.0",
|
||||
"@nestjs/passport": "11.0.5",
|
||||
"@nestjs/platform-fastify": "11.1.5",
|
||||
"@nestjs/platform-fastify": "11.1.6",
|
||||
"@nestjs/schedule": "6.0.0",
|
||||
"@nestjs/throttler": "6.4.0",
|
||||
"@reduxjs/toolkit": "2.8.2",
|
||||
@@ -82,7 +82,7 @@
|
||||
"atomically": "2.0.3",
|
||||
"bycontract": "2.0.11",
|
||||
"bytes": "3.1.2",
|
||||
"cache-manager": "7.0.1",
|
||||
"cache-manager": "7.1.1",
|
||||
"cacheable-lookup": "7.0.0",
|
||||
"camelcase-keys": "9.1.3",
|
||||
"casbin": "5.38.0",
|
||||
@@ -94,16 +94,16 @@
|
||||
"command-exists": "1.2.9",
|
||||
"convert": "5.12.0",
|
||||
"cookie": "1.0.2",
|
||||
"cron": "4.3.2",
|
||||
"cron": "4.3.3",
|
||||
"cross-fetch": "4.1.0",
|
||||
"diff": "8.0.2",
|
||||
"dockerode": "4.0.7",
|
||||
"dotenv": "17.2.1",
|
||||
"execa": "9.6.0",
|
||||
"exit-hook": "4.0.0",
|
||||
"fastify": "5.4.0",
|
||||
"fastify": "5.5.0",
|
||||
"filenamify": "6.0.0",
|
||||
"fs-extra": "11.3.0",
|
||||
"fs-extra": "11.3.1",
|
||||
"glob": "11.0.3",
|
||||
"global-agent": "3.0.0",
|
||||
"got": "14.4.7",
|
||||
@@ -125,20 +125,21 @@
|
||||
"nestjs-pino": "4.4.0",
|
||||
"node-cache": "5.1.2",
|
||||
"node-window-polyfill": "1.0.4",
|
||||
"openid-client": "6.6.2",
|
||||
"p-retry": "6.2.1",
|
||||
"passport-custom": "1.1.1",
|
||||
"passport-http-header-strategy": "1.1.0",
|
||||
"path-type": "6.0.0",
|
||||
"pino": "9.7.0",
|
||||
"pino": "9.8.0",
|
||||
"pino-http": "10.5.0",
|
||||
"pino-pretty": "13.0.0",
|
||||
"pino-pretty": "13.1.1",
|
||||
"pm2": "6.0.8",
|
||||
"reflect-metadata": "^0.1.14",
|
||||
"request": "2.88.2",
|
||||
"rxjs": "7.8.2",
|
||||
"semver": "7.7.2",
|
||||
"strftime": "0.10.3",
|
||||
"systeminformation": "5.27.7",
|
||||
"undici": "7.13.0",
|
||||
"uuid": "11.1.0",
|
||||
"ws": "8.18.3",
|
||||
"zen-observable-ts": "1.1.0",
|
||||
@@ -153,7 +154,7 @@
|
||||
}
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "9.32.0",
|
||||
"@eslint/js": "9.33.0",
|
||||
"@graphql-codegen/add": "5.0.3",
|
||||
"@graphql-codegen/cli": "5.0.7",
|
||||
"@graphql-codegen/fragment-matcher": "5.1.0",
|
||||
@@ -163,11 +164,11 @@
|
||||
"@graphql-codegen/typescript-operations": "4.6.1",
|
||||
"@graphql-codegen/typescript-resolvers": "4.5.1",
|
||||
"@graphql-typed-document-node/core": "3.2.0",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.5.1",
|
||||
"@nestjs/testing": "11.1.5",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.6.1",
|
||||
"@nestjs/testing": "11.1.6",
|
||||
"@originjs/vite-plugin-commonjs": "1.0.3",
|
||||
"@rollup/plugin-node-resolve": "16.0.1",
|
||||
"@swc/core": "1.13.2",
|
||||
"@swc/core": "1.13.3",
|
||||
"@types/async-exit-hook": "2.0.2",
|
||||
"@types/bytes": "3.1.5",
|
||||
"@types/cli-table": "0.3.4",
|
||||
@@ -181,41 +182,37 @@
|
||||
"@types/lodash": "4.17.20",
|
||||
"@types/lodash-es": "4.17.12",
|
||||
"@types/mustache": "4.2.6",
|
||||
"@types/node": "22.16.5",
|
||||
"@types/node": "22.17.1",
|
||||
"@types/pify": "6.1.0",
|
||||
"@types/semver": "7.7.0",
|
||||
"@types/sendmail": "1.4.7",
|
||||
"@types/stoppable": "1.1.3",
|
||||
"@types/strftime": "0.9.8",
|
||||
"@types/supertest": "^6.0.3",
|
||||
"@types/supertest": "6.0.3",
|
||||
"@types/uuid": "10.0.0",
|
||||
"@types/ws": "8.18.1",
|
||||
"@types/wtfnode": "0.7.3",
|
||||
"@vitest/coverage-v8": "3.2.4",
|
||||
"@vitest/ui": "3.2.4",
|
||||
"commit-and-tag-version": "9.6.0",
|
||||
"cz-conventional-changelog": "3.3.0",
|
||||
"eslint": "9.32.0",
|
||||
"eslint": "9.33.0",
|
||||
"eslint-plugin-import": "2.32.0",
|
||||
"eslint-plugin-n": "17.21.2",
|
||||
"eslint-plugin-no-relative-import-paths": "1.6.1",
|
||||
"eslint-plugin-prettier": "5.5.3",
|
||||
"graphql-codegen-typescript-validation-schema": "0.17.1",
|
||||
"eslint-plugin-prettier": "5.5.4",
|
||||
"jiti": "2.5.1",
|
||||
"nodemon": "3.1.10",
|
||||
"prettier": "3.6.2",
|
||||
"rollup-plugin-node-externals": "8.0.1",
|
||||
"supertest": "^7.1.4",
|
||||
"supertest": "7.1.4",
|
||||
"tsx": "4.20.3",
|
||||
"type-fest": "4.41.0",
|
||||
"typescript": "5.8.3",
|
||||
"typescript-eslint": "8.38.0",
|
||||
"typescript": "5.9.2",
|
||||
"typescript-eslint": "8.39.1",
|
||||
"unplugin-swc": "1.5.5",
|
||||
"vite": "7.0.6",
|
||||
"vite": "7.1.1",
|
||||
"vite-plugin-node": "7.0.0",
|
||||
"vite-tsconfig-paths": "5.1.4",
|
||||
"vitest": "3.2.4",
|
||||
"zx": "8.7.1"
|
||||
"zx": "8.8.0"
|
||||
},
|
||||
"overrides": {
|
||||
"eslint": {
|
||||
@@ -230,5 +227,5 @@
|
||||
}
|
||||
},
|
||||
"private": true,
|
||||
"packageManager": "pnpm@10.13.1"
|
||||
"packageManager": "pnpm@10.14.0"
|
||||
}
|
||||
|
||||
@@ -34,6 +34,15 @@ vi.mock('@app/store/index.js', () => ({
|
||||
}),
|
||||
},
|
||||
}));
|
||||
vi.mock('@app/environment.js', () => ({
|
||||
ENVIRONMENT: 'development',
|
||||
environment: {
|
||||
IS_MAIN_PROCESS: true,
|
||||
},
|
||||
}));
|
||||
vi.mock('@app/core/utils/files/file-exists.js', () => ({
|
||||
fileExists: vi.fn().mockResolvedValue(true),
|
||||
}));
|
||||
|
||||
// Mock NestJS Logger to suppress logs during tests
|
||||
vi.mock('@nestjs/common', async (importOriginal) => {
|
||||
@@ -63,13 +72,22 @@ describe('RCloneApiService', () => {
|
||||
const { execa } = await import('execa');
|
||||
const pRetry = await import('p-retry');
|
||||
const { existsSync } = await import('node:fs');
|
||||
const { fileExists } = await import('@app/core/utils/files/file-exists.js');
|
||||
|
||||
mockGot = vi.mocked(got);
|
||||
mockExeca = vi.mocked(execa);
|
||||
mockPRetry = vi.mocked(pRetry.default);
|
||||
mockExistsSync = vi.mocked(existsSync);
|
||||
|
||||
mockGot.post = vi.fn().mockResolvedValue({ body: {} });
|
||||
// Mock successful RClone API response for socket check
|
||||
mockGot.post = vi.fn().mockResolvedValue({ body: { pid: 12345 } });
|
||||
|
||||
// Mock RClone binary exists check
|
||||
vi.mocked(fileExists).mockResolvedValue(true);
|
||||
|
||||
// Mock socket exists
|
||||
mockExistsSync.mockReturnValue(true);
|
||||
|
||||
mockExeca.mockReturnValue({
|
||||
on: vi.fn(),
|
||||
kill: vi.fn(),
|
||||
@@ -77,10 +95,12 @@ describe('RCloneApiService', () => {
|
||||
pid: 12345,
|
||||
} as any);
|
||||
mockPRetry.mockResolvedValue(undefined);
|
||||
mockExistsSync.mockReturnValue(false);
|
||||
|
||||
service = new RCloneApiService();
|
||||
await service.onModuleInit();
|
||||
|
||||
// Reset the mock after initialization to prepare for test-specific responses
|
||||
mockGot.post.mockClear();
|
||||
});
|
||||
|
||||
describe('getProviders', () => {
|
||||
@@ -102,6 +122,9 @@ describe('RCloneApiService', () => {
|
||||
json: {},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -129,6 +152,11 @@ describe('RCloneApiService', () => {
|
||||
'http://unix:/tmp/rclone.sock:/config/listremotes',
|
||||
expect.objectContaining({
|
||||
json: {},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -155,6 +183,11 @@ describe('RCloneApiService', () => {
|
||||
'http://unix:/tmp/rclone.sock:/config/get',
|
||||
expect.objectContaining({
|
||||
json: { name: 'test-remote' },
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -193,6 +226,11 @@ describe('RCloneApiService', () => {
|
||||
type: 's3',
|
||||
parameters: { access_key_id: 'AKIA...', secret_access_key: 'secret' },
|
||||
},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -217,6 +255,11 @@ describe('RCloneApiService', () => {
|
||||
name: 'existing-remote',
|
||||
access_key_id: 'NEW_AKIA...',
|
||||
},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -235,6 +278,11 @@ describe('RCloneApiService', () => {
|
||||
'http://unix:/tmp/rclone.sock:/config/delete',
|
||||
expect.objectContaining({
|
||||
json: { name: 'remote-to-delete' },
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -261,6 +309,11 @@ describe('RCloneApiService', () => {
|
||||
dstFs: 'remote:backup/path',
|
||||
delete_on: 'dst',
|
||||
},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -279,6 +332,11 @@ describe('RCloneApiService', () => {
|
||||
'http://unix:/tmp/rclone.sock:/job/status',
|
||||
expect.objectContaining({
|
||||
json: { jobid: 'job-123' },
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -299,6 +357,11 @@ describe('RCloneApiService', () => {
|
||||
'http://unix:/tmp/rclone.sock:/job/list',
|
||||
expect.objectContaining({
|
||||
json: {},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
@@ -3,6 +3,7 @@ import '@app/__test__/setup/env-setup.js';
|
||||
import '@app/__test__/setup/keyserver-mock.js';
|
||||
import '@app/__test__/setup/config-setup.js';
|
||||
import '@app/__test__/setup/store-reset.js';
|
||||
import '@app/__test__/setup/api-json-backup.js';
|
||||
|
||||
// This file is automatically loaded by Vitest before running tests
|
||||
// It imports all the setup files that need to be run before tests
|
||||
|
||||
api/src/__test__/setup/api-json-backup.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
import { existsSync, readFileSync, writeFileSync } from 'fs';
|
||||
import { join, resolve } from 'path';
|
||||
|
||||
import { afterAll, beforeAll } from 'vitest';
|
||||
|
||||
// Get the project root directory
|
||||
const projectRoot = resolve(process.cwd());
|
||||
const apiJsonPath = join(projectRoot, 'dev/configs/api.json');
|
||||
const apiJsonBackupPath = join(projectRoot, 'dev/configs/api.json.backup');
|
||||
|
||||
let originalContent: string | null = null;
|
||||
|
||||
/**
|
||||
* Backs up api.json before tests run and restores it after tests complete.
|
||||
* This prevents tests from permanently modifying the development configuration.
|
||||
*/
|
||||
export function setupApiJsonBackup() {
|
||||
beforeAll(() => {
|
||||
// Save the original content if the file exists
|
||||
if (existsSync(apiJsonPath)) {
|
||||
originalContent = readFileSync(apiJsonPath, 'utf-8');
|
||||
// Create a backup file as well for safety
|
||||
writeFileSync(apiJsonBackupPath, originalContent, 'utf-8');
|
||||
}
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
// Restore the original content if we saved it
|
||||
if (originalContent !== null) {
|
||||
writeFileSync(apiJsonPath, originalContent, 'utf-8');
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Auto-run for all tests that import this module
|
||||
setupApiJsonBackup();
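
Because the module calls `setupApiJsonBackup()` at import time, registering it is just the side-effect import added to the shared Vitest setup file in the earlier hunk:

```typescript
// In the Vitest setup file: importing the module registers the beforeAll/afterAll hooks.
import '@app/__test__/setup/api-json-backup.js';
```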
|
||||
@@ -1,4 +1,4 @@
|
||||
import { pino } from 'pino';
|
||||
import pino from 'pino';
|
||||
import pretty from 'pino-pretty';
|
||||
|
||||
import { API_VERSION, LOG_LEVEL, LOG_TYPE, PATHS_LOGS_FILE, SUPPRESS_LOGS } from '@app/environment.js';
|
||||
|
||||
@@ -8,7 +8,7 @@ export class NginxManager {
|
||||
await execa('/etc/rc.d/rc.nginx', ['reload']);
|
||||
return true;
|
||||
} catch (err: unknown) {
|
||||
logger.warn('Failed to restart Nginx with error: ', err);
|
||||
logger.warn('Failed to restart Nginx with error: %o', err as object);
|
||||
return false;
|
||||
}
|
||||
};
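
The `%s` to `%o` changes in these hunks use pino's printf-style placeholders, where `%o` marks an argument that should be formatted as an object. A minimal sketch with an illustrative error object, separate from the diff above:

```typescript
import pino from 'pino';

const logger = pino();

// '%o' is pino's printf-style placeholder for an object argument.
logger.warn('Failed to restart Nginx with error: %o', { code: 'ECONNREFUSED', syscall: 'connect' });
```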
|
||||
|
||||
@@ -8,7 +8,7 @@ export class UpdateDNSManager {
|
||||
await execa('/usr/bin/php', ['/usr/local/emhttp/plugins/dynamix/include/UpdateDNS.php']);
|
||||
return true;
|
||||
} catch (err: unknown) {
|
||||
logger.warn('Failed to call Update DNS with error: ', err);
|
||||
logger.warn('Failed to call Update DNS with error: %o', err as object);
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -15,6 +15,8 @@ export const pubsub = new PubSub({ eventEmitter });
|
||||
* Create a pubsub subscription.
|
||||
* @param channel The pubsub channel to subscribe to.
|
||||
*/
|
||||
export const createSubscription = (channel: GRAPHQL_PUBSUB_CHANNEL) => {
|
||||
return pubsub.asyncIterableIterator(channel);
|
||||
export const createSubscription = <T = any>(
|
||||
channel: GRAPHQL_PUBSUB_CHANNEL
|
||||
): AsyncIterableIterator<T> => {
|
||||
return pubsub.asyncIterableIterator<T>(channel);
|
||||
};
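
A usage sketch of the now-generic helper; the channel member and payload type are illustrative rather than taken from this diff:

```typescript
// Hypothetical payload type for the subscription events.
interface ArrayEvent {
  id: string;
  state: string;
}

async function watchArrayEvents() {
  // Assumes an ARRAY member exists on GRAPHQL_PUBSUB_CHANNEL; substitute a real channel from the enum.
  const events = createSubscription<ArrayEvent>(GRAPHQL_PUBSUB_CHANNEL.ARRAY);
  for await (const event of events) {
    console.log('array event', event.id, event.state);
  }
}
```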
|
||||
|
||||
@@ -26,7 +26,7 @@ export const loadState = <T extends Record<string, unknown>>(filePath: string):
|
||||
logger.trace(
|
||||
'Failed loading state file "%s" with "%s"',
|
||||
filePath,
|
||||
error instanceof Error ? error.message : error
|
||||
error instanceof Error ? error.message : String(error)
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
api/src/core/utils/validation/enum-validator.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
|
||||
export function isValidEnumValue<T extends Record<string, string | number>>(
|
||||
value: unknown,
|
||||
enumObject: T
|
||||
): value is T[keyof T] {
|
||||
if (value == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return Object.values(enumObject).includes(value as T[keyof T]);
|
||||
}
|
||||
|
||||
export function validateEnumValue<T extends Record<string, string | number>>(
|
||||
value: unknown,
|
||||
enumObject: T
|
||||
): T[keyof T] | undefined {
|
||||
return isValidEnumValue(value, enumObject) ? (value as T[keyof T]) : undefined;
|
||||
}
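
A quick usage sketch of the two helpers defined above (the enum is illustrative):

```typescript
// Illustrative enum, not part of the API codebase.
enum LogLevelExample {
  Info = 'info',
  Warn = 'warn',
  Error = 'error',
}

isValidEnumValue('warn', LogLevelExample); // true (narrows the unknown value to LogLevelExample)
isValidEnumValue('fatal', LogLevelExample); // false
validateEnumValue('error', LogLevelExample); // 'error'
validateEnumValue(42, LogLevelExample); // undefined (invalid values are dropped instead of throwing)
```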
|
||||
@@ -13,7 +13,7 @@ const isGuiMode = async (): Promise<boolean> => {
|
||||
// exitCode 0 means process was found, 1 means not found
|
||||
return exitCode === 0;
|
||||
} catch (error) {
|
||||
internalLogger.error('Error checking GUI mode: %s', error);
|
||||
internalLogger.error('Error checking GUI mode: %o', error as object);
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -99,7 +99,7 @@ export const viteNodeApp = async () => {
|
||||
|
||||
asyncExitHook(
|
||||
async (signal) => {
|
||||
logger.info('Exiting with signal %s', signal);
|
||||
logger.info('Exiting with signal %d', signal);
|
||||
await server?.close?.();
|
||||
// If port is unix socket, delete socket before exiting
|
||||
unlinkUnixPort();
|
||||
|
||||
@@ -94,7 +94,7 @@ export const run = async (channel: string, mutation: string, options: RunOptions
|
||||
);
|
||||
}
|
||||
} else {
|
||||
logger.debug('Error: %s', error);
|
||||
logger.debug('Error: %o', error as object);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@@ -62,7 +62,7 @@ export class StateManager {
|
||||
emhttpLogger.error(
|
||||
'Failed to load state file: [%s]\nerror: %o',
|
||||
stateFile,
|
||||
error
|
||||
error as object
|
||||
);
|
||||
}
|
||||
} else {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { CacheModule } from '@nestjs/cache-manager';
|
||||
import { Module } from '@nestjs/common';
|
||||
import { APP_GUARD } from '@nestjs/core';
|
||||
import { ScheduleModule } from '@nestjs/schedule';
|
||||
import { ThrottlerModule } from '@nestjs/throttler';
|
||||
|
||||
import { AuthZGuard } from 'nest-authz';
|
||||
@@ -23,23 +24,16 @@ import { UnraidFileModifierModule } from '@app/unraid-api/unraid-file-modifier/u
|
||||
GlobalDepsModule,
|
||||
LegacyConfigModule,
|
||||
PubSubModule,
|
||||
ScheduleModule.forRoot(),
|
||||
LoggerModule.forRoot({
|
||||
pinoHttp: {
|
||||
logger: apiLogger,
|
||||
autoLogging: false,
|
||||
timestamp: false,
|
||||
...(LOG_LEVEL !== 'TRACE'
|
||||
? {
|
||||
serializers: {
|
||||
req: (req) => ({
|
||||
id: req.id,
|
||||
method: req.method,
|
||||
url: req.url,
|
||||
remoteAddress: req.remoteAddress,
|
||||
}),
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
serializers: {
|
||||
req: () => undefined,
|
||||
res: () => undefined,
|
||||
},
|
||||
},
|
||||
}),
|
||||
AuthModule,
|
||||
|
||||
@@ -1,80 +0,0 @@
|
||||
import { Injectable, Logger, Optional } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
|
||||
import type { SsoUserService as ISsoUserService } from '@unraid/shared/services/sso.js';
|
||||
import { GraphQLError } from 'graphql/error/GraphQLError.js';
|
||||
|
||||
import type { ApiConfig } from '@app/unraid-api/config/api-config.module.js';
|
||||
import { UnraidFileModificationService } from '@app/unraid-api/unraid-file-modifier/unraid-file-modifier.service.js';
|
||||
|
||||
@Injectable()
|
||||
export class SsoUserService implements ISsoUserService {
|
||||
private readonly logger = new Logger(SsoUserService.name);
|
||||
private ssoSubIdsConfigKey = 'api.ssoSubIds';
|
||||
|
||||
constructor(
|
||||
private readonly configService: ConfigService,
|
||||
@Optional() private readonly fileModificationService?: UnraidFileModificationService
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Get the current list of SSO user IDs
|
||||
* @returns Array of SSO user IDs
|
||||
*/
|
||||
async getSsoUsers(): Promise<string[]> {
|
||||
const ssoSubIds = this.configService.getOrThrow<ApiConfig['ssoSubIds']>(this.ssoSubIdsConfigKey);
|
||||
return ssoSubIds;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the complete list of SSO user IDs
|
||||
* @param userIds - The list of SSO user IDs to set
|
||||
* @returns true if a restart is required, false otherwise
|
||||
*/
|
||||
async setSsoUsers(userIds: string[]): Promise<boolean> {
|
||||
const currentUsers = await this.getSsoUsers();
|
||||
const currentUserSet = new Set(currentUsers);
|
||||
const newUserSet = new Set(userIds);
|
||||
|
||||
// If there's no change, no need to update
|
||||
if (newUserSet.symmetricDifference(currentUserSet).size === 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Validate user IDs
|
||||
const uuidRegex =
|
||||
/^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/;
|
||||
const invalidUserIds = userIds.filter((id) => !uuidRegex.test(id));
|
||||
if (invalidUserIds.length > 0) {
|
||||
throw new GraphQLError(`Invalid SSO user ID's: ${invalidUserIds.join(', ')}`);
|
||||
}
|
||||
|
||||
// Update the config
|
||||
this.configService.set(this.ssoSubIdsConfigKey, userIds);
|
||||
|
||||
// Handle file modification if available
|
||||
if (this.fileModificationService) {
|
||||
// If going from 0 to 1+ users, apply the SSO modification
|
||||
if (currentUserSet.size === 0 && newUserSet.size > 0) {
|
||||
try {
|
||||
await this.fileModificationService.applyModificationById('sso');
|
||||
this.logger.log('Applied SSO file modification after adding SSO users');
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to apply SSO file modification', error);
|
||||
}
|
||||
}
|
||||
// If going from 1+ to 0 users, rollback the SSO modification
|
||||
else if (currentUserSet.size > 0 && newUserSet.size === 0) {
|
||||
try {
|
||||
await this.fileModificationService.rollbackModificationById('sso');
|
||||
this.logger.log('Rolled back SSO file modification after removing all SSO users');
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to rollback SSO file modification', error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// No restart required - file modifications are applied immediately
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -1,154 +0,0 @@
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import { InquirerService } from 'nest-commander';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { RestartCommand } from '@app/unraid-api/cli/restart.command.js';
|
||||
import { AddSSOUserCommand } from '@app/unraid-api/cli/sso/add-sso-user.command.js';
|
||||
|
||||
// Mock services
|
||||
const mockInternalClient = {
|
||||
getClient: vi.fn(),
|
||||
};
|
||||
|
||||
const mockLogger = {
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
};
|
||||
|
||||
const mockRestartCommand = {
|
||||
run: vi.fn(),
|
||||
};
|
||||
|
||||
const mockInquirerService = {
|
||||
prompt: vi.fn(),
|
||||
};
|
||||
|
||||
describe('AddSSOUserCommand', () => {
|
||||
let command: AddSSOUserCommand;
|
||||
|
||||
beforeEach(async () => {
|
||||
const module = await Test.createTestingModule({
|
||||
providers: [
|
||||
AddSSOUserCommand,
|
||||
{ provide: CliInternalClientService, useValue: mockInternalClient },
|
||||
{ provide: LogService, useValue: mockLogger },
|
||||
{ provide: RestartCommand, useValue: mockRestartCommand },
|
||||
{ provide: InquirerService, useValue: mockInquirerService },
|
||||
],
|
||||
}).compile();
|
||||
|
||||
command = module.get<AddSSOUserCommand>(AddSSOUserCommand);
|
||||
|
||||
// Clear mocks
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should add a new SSO user successfully', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
settings: {
|
||||
api: {
|
||||
ssoSubIds: ['existing-user-id'],
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
mutate: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
updateSettings: {
|
||||
restartRequired: false,
|
||||
values: {},
|
||||
},
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
disclaimer: 'y',
|
||||
username: 'new-user-id',
|
||||
});
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockClient.query).toHaveBeenCalled();
|
||||
expect(mockClient.mutate).toHaveBeenCalledWith({
|
||||
mutation: expect.anything(),
|
||||
variables: {
|
||||
input: {
|
||||
api: {
|
||||
ssoSubIds: ['existing-user-id', 'new-user-id'],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(mockLogger.info).toHaveBeenCalledWith('User added: new-user-id');
|
||||
expect(mockLogger.info).not.toHaveBeenCalledWith('Restarting the API');
|
||||
expect(mockRestartCommand.run).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not add user if disclaimer is not accepted', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn(),
|
||||
mutate: vi.fn(),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
disclaimer: 'n',
|
||||
username: 'new-user-id',
|
||||
});
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockClient.query).not.toHaveBeenCalled();
|
||||
expect(mockClient.mutate).not.toHaveBeenCalled();
|
||||
expect(mockRestartCommand.run).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not add user if user already exists', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
settings: {
|
||||
api: {
|
||||
ssoSubIds: ['existing-user-id'],
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
mutate: vi.fn(),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
disclaimer: 'y',
|
||||
username: 'existing-user-id',
|
||||
});
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockClient.query).toHaveBeenCalled();
|
||||
expect(mockClient.mutate).not.toHaveBeenCalled();
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
'User existing-user-id already exists in SSO users'
|
||||
);
|
||||
expect(mockRestartCommand.run).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle errors gracefully', async () => {
|
||||
mockInternalClient.getClient.mockRejectedValue(new Error('Connection failed'));
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
disclaimer: 'y',
|
||||
username: 'new-user-id',
|
||||
});
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith('Error adding user:', expect.any(Error));
|
||||
});
|
||||
});
|
||||
@@ -64,9 +64,13 @@ describe('ApiReportService', () => {
|
||||
uuid: 'test-uuid',
|
||||
},
|
||||
versions: {
|
||||
unraid: '6.12.0',
|
||||
kernel: '5.19.17',
|
||||
openssl: '3.0.8',
|
||||
core: {
|
||||
unraid: '6.12.0',
|
||||
kernel: '5.19.17',
|
||||
},
|
||||
packages: {
|
||||
openssl: '3.0.8',
|
||||
},
|
||||
},
|
||||
},
|
||||
config: {
|
||||
|
||||
@@ -1,86 +0,0 @@
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { ListSSOUserCommand } from '@app/unraid-api/cli/sso/list-sso-user.command.js';
|
||||
|
||||
// Mock services
|
||||
const mockInternalClient = {
|
||||
getClient: vi.fn(),
|
||||
};
|
||||
|
||||
const mockLogger = {
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
};
|
||||
|
||||
describe('ListSSOUserCommand', () => {
|
||||
let command: ListSSOUserCommand;
|
||||
|
||||
beforeEach(async () => {
|
||||
const module = await Test.createTestingModule({
|
||||
providers: [
|
||||
ListSSOUserCommand,
|
||||
{ provide: CliInternalClientService, useValue: mockInternalClient },
|
||||
{ provide: LogService, useValue: mockLogger },
|
||||
],
|
||||
}).compile();
|
||||
|
||||
command = module.get<ListSSOUserCommand>(ListSSOUserCommand);
|
||||
|
||||
// Clear mocks
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should list all SSO users', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
settings: {
|
||||
api: {
|
||||
ssoSubIds: ['user-1', 'user-2', 'user-3'],
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockClient.query).toHaveBeenCalledWith({
|
||||
query: expect.anything(),
|
||||
});
|
||||
expect(mockLogger.info).toHaveBeenCalledWith('user-1\nuser-2\nuser-3');
|
||||
});
|
||||
|
||||
it('should display message when no users found', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
settings: {
|
||||
api: {
|
||||
ssoSubIds: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockClient.query).toHaveBeenCalled();
|
||||
expect(mockLogger.info).toHaveBeenCalledWith('No SSO users found');
|
||||
});
|
||||
|
||||
it('should handle errors gracefully', async () => {
|
||||
mockInternalClient.getClient.mockRejectedValue(new Error('Connection failed'));
|
||||
|
||||
await expect(command.run([])).rejects.toThrow('Connection failed');
|
||||
});
|
||||
});
|
||||
@@ -1,186 +0,0 @@
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import { InquirerService } from 'nest-commander';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { RestartCommand } from '@app/unraid-api/cli/restart.command.js';
|
||||
import { RemoveSSOUserCommand } from '@app/unraid-api/cli/sso/remove-sso-user.command.js';
|
||||
|
||||
// Mock services
|
||||
const mockInternalClient = {
|
||||
getClient: vi.fn(),
|
||||
};
|
||||
|
||||
const mockLogger = {
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
};
|
||||
|
||||
const mockRestartCommand = {
|
||||
run: vi.fn(),
|
||||
};
|
||||
|
||||
const mockInquirerService = {
|
||||
prompt: vi.fn(),
|
||||
};
|
||||
|
||||
describe('RemoveSSOUserCommand', () => {
|
||||
let command: RemoveSSOUserCommand;
|
||||
|
||||
beforeEach(async () => {
|
||||
const module = await Test.createTestingModule({
|
||||
providers: [
|
||||
RemoveSSOUserCommand,
|
||||
{ provide: CliInternalClientService, useValue: mockInternalClient },
|
||||
{ provide: LogService, useValue: mockLogger },
|
||||
{ provide: RestartCommand, useValue: mockRestartCommand },
|
||||
{ provide: InquirerService, useValue: mockInquirerService },
|
||||
],
|
||||
}).compile();
|
||||
|
||||
command = module.get<RemoveSSOUserCommand>(RemoveSSOUserCommand);
|
||||
|
||||
// Clear mocks
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should remove a specific SSO user successfully', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
settings: {
|
||||
api: {
|
||||
ssoSubIds: ['user-1', 'user-2', 'user-3'],
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
mutate: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
updateSettings: {
|
||||
restartRequired: true,
|
||||
values: {},
|
||||
},
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
username: 'user-2',
|
||||
});
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockClient.query).toHaveBeenCalled();
|
||||
expect(mockClient.mutate).toHaveBeenCalledWith({
|
||||
mutation: expect.anything(),
|
||||
variables: {
|
||||
input: {
|
||||
api: {
|
||||
ssoSubIds: ['user-1', 'user-3'],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(mockLogger.info).toHaveBeenCalledWith('User removed: user-2');
|
||||
expect(mockLogger.info).toHaveBeenCalledWith('Restarting the API');
|
||||
expect(mockRestartCommand.run).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should remove all SSO users when "all" is selected', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
settings: {
|
||||
api: {
|
||||
ssoSubIds: ['user-1', 'user-2', 'user-3'],
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
mutate: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
updateSettings: {
|
||||
restartRequired: true,
|
||||
values: {},
|
||||
},
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
username: 'all',
|
||||
});
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockClient.query).toHaveBeenCalled();
|
||||
expect(mockClient.mutate).toHaveBeenCalledWith({
|
||||
mutation: expect.anything(),
|
||||
variables: {
|
||||
input: {
|
||||
api: {
|
||||
ssoSubIds: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(mockLogger.info).toHaveBeenCalledWith('All users removed from SSO');
|
||||
expect(mockRestartCommand.run).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not remove user if user does not exist', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
data: {
|
||||
settings: {
|
||||
api: {
|
||||
ssoSubIds: ['user-1', 'user-3'],
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
mutate: vi.fn(),
|
||||
};
|
||||
|
||||
mockInternalClient.getClient.mockResolvedValue(mockClient);
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
username: 'user-2',
|
||||
});
|
||||
|
||||
await command.run([]);
|
||||
|
||||
expect(mockClient.query).toHaveBeenCalled();
|
||||
expect(mockClient.mutate).not.toHaveBeenCalled();
|
||||
expect(mockLogger.error).toHaveBeenCalledWith('User user-2 not found in SSO users');
|
||||
expect(mockRestartCommand.run).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should exit when no SSO users are found', async () => {
|
||||
const processExitSpy = vi.spyOn(process, 'exit').mockImplementation(() => {
|
||||
throw new Error('process.exit');
|
||||
});
|
||||
|
||||
const error = new Error('No SSO Users Found');
|
||||
(error as any).name = 'NoSSOUsersFoundError';
|
||||
mockInquirerService.prompt.mockRejectedValue(error);
|
||||
|
||||
try {
|
||||
await command.run([]);
|
||||
} catch (error) {
|
||||
// Expected to throw due to process.exit
|
||||
}
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
'Failed to fetch SSO users: %s',
|
||||
'No SSO Users Found'
|
||||
);
|
||||
expect(processExitSpy).toHaveBeenCalledWith(1);
|
||||
|
||||
processExitSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
@@ -82,7 +82,7 @@ export class ApiReportService {
|
||||
? {
|
||||
id: systemData.info.system.uuid,
|
||||
name: systemData.server?.name || 'Unknown',
|
||||
version: systemData.info.versions.unraid || 'Unknown',
|
||||
version: systemData.info.versions.core.unraid || 'Unknown',
|
||||
machineId: 'REDACTED',
|
||||
manufacturer: systemData.info.system.manufacturer,
|
||||
model: systemData.info.system.model,
|
||||
|
||||
@@ -2,7 +2,6 @@ import { Module } from '@nestjs/common';
|
||||
|
||||
import { DependencyService } from '@app/unraid-api/app/dependency.service.js';
|
||||
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
|
||||
import { SsoUserService } from '@app/unraid-api/auth/sso-user.service.js';
|
||||
import { AdminKeyService } from '@app/unraid-api/cli/admin-key.service.js';
|
||||
import { ApiReportService } from '@app/unraid-api/cli/api-report.service.js';
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
@@ -28,12 +27,11 @@ import { UnraidFileModifierModule } from '@app/unraid-api/unraid-file-modifier/u
|
||||
LogService,
|
||||
PM2Service,
|
||||
ApiKeyService,
|
||||
SsoUserService,
|
||||
DependencyService,
|
||||
AdminKeyService,
|
||||
ApiReportService,
|
||||
CliInternalClientService,
|
||||
],
|
||||
exports: [ApiReportService, LogService, ApiKeyService, SsoUserService, CliInternalClientService],
|
||||
exports: [ApiReportService, LogService, ApiKeyService, CliInternalClientService],
|
||||
})
|
||||
export class CliServicesModule {}
|
||||
|
||||
api/src/unraid-api/cli/cli.module.spec.ts (new file, 74 lines)
@@ -0,0 +1,74 @@
|
||||
import { ConfigModule } from '@nestjs/config';
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import { INTERNAL_CLIENT_SERVICE_TOKEN } from '@unraid/shared';
|
||||
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
|
||||
|
||||
import { AdminKeyService } from '@app/unraid-api/cli/admin-key.service.js';
|
||||
import { CliServicesModule } from '@app/unraid-api/cli/cli-services.module.js';
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { InternalGraphQLClientFactory } from '@app/unraid-api/shared/internal-graphql-client.factory.js';
|
||||
|
||||
describe('CliServicesModule', () => {
|
||||
let module: TestingModule;
|
||||
|
||||
beforeEach(async () => {
|
||||
module = await Test.createTestingModule({
|
||||
imports: [CliServicesModule],
|
||||
}).compile();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await module?.close();
|
||||
});
|
||||
|
||||
it('should compile the module', () => {
|
||||
expect(module).toBeDefined();
|
||||
});
|
||||
|
||||
it('should provide CliInternalClientService', () => {
|
||||
const service = module.get(CliInternalClientService);
|
||||
expect(service).toBeDefined();
|
||||
expect(service).toBeInstanceOf(CliInternalClientService);
|
||||
});
|
||||
|
||||
it('should provide AdminKeyService', () => {
|
||||
const service = module.get(AdminKeyService);
|
||||
expect(service).toBeDefined();
|
||||
expect(service).toBeInstanceOf(AdminKeyService);
|
||||
});
|
||||
|
||||
it('should provide InternalGraphQLClientFactory via token', () => {
|
||||
const factory = module.get(INTERNAL_CLIENT_SERVICE_TOKEN);
|
||||
expect(factory).toBeDefined();
|
||||
expect(factory).toBeInstanceOf(InternalGraphQLClientFactory);
|
||||
});
|
||||
|
||||
describe('CliInternalClientService dependencies', () => {
|
||||
it('should have all required dependencies available', () => {
|
||||
// This test ensures that CliInternalClientService can be instantiated
|
||||
// with all its dependencies properly resolved
|
||||
const service = module.get(CliInternalClientService);
|
||||
expect(service).toBeDefined();
|
||||
|
||||
// Verify the service has its dependencies injected
|
||||
// The service should be able to create a client without errors
|
||||
expect(service.getClient).toBeDefined();
|
||||
expect(service.clearClient).toBeDefined();
|
||||
});
|
||||
|
||||
it('should resolve InternalGraphQLClientFactory dependency via token', () => {
|
||||
// Explicitly test that the factory is available in the module context via token
|
||||
const factory = module.get(INTERNAL_CLIENT_SERVICE_TOKEN);
|
||||
expect(factory).toBeDefined();
|
||||
expect(factory.createClient).toBeDefined();
|
||||
});
|
||||
|
||||
it('should resolve AdminKeyService dependency', () => {
|
||||
// Explicitly test that AdminKeyService is available in the module context
|
||||
const adminKeyService = module.get(AdminKeyService);
|
||||
expect(adminKeyService).toBeDefined();
|
||||
expect(adminKeyService.getOrCreateLocalAdminKey).toBeDefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -3,7 +3,6 @@ import { ConfigModule } from '@nestjs/config';
|
||||
|
||||
import { DependencyService } from '@app/unraid-api/app/dependency.service.js';
|
||||
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
|
||||
import { SsoUserService } from '@app/unraid-api/auth/sso-user.service.js';
|
||||
import { AdminKeyService } from '@app/unraid-api/cli/admin-key.service.js';
|
||||
import { ApiReportService } from '@app/unraid-api/cli/api-report.service.js';
|
||||
import { AddApiKeyQuestionSet } from '@app/unraid-api/cli/apikey/add-api-key.questions.js';
|
||||
@@ -26,11 +25,6 @@ import { RemovePluginQuestionSet } from '@app/unraid-api/cli/plugins/remove-plug
|
||||
import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';
|
||||
import { ReportCommand } from '@app/unraid-api/cli/report.command.js';
|
||||
import { RestartCommand } from '@app/unraid-api/cli/restart.command.js';
|
||||
import { AddSSOUserCommand } from '@app/unraid-api/cli/sso/add-sso-user.command.js';
|
||||
import { AddSSOUserQuestionSet } from '@app/unraid-api/cli/sso/add-sso-user.questions.js';
|
||||
import { ListSSOUserCommand } from '@app/unraid-api/cli/sso/list-sso-user.command.js';
|
||||
import { RemoveSSOUserCommand } from '@app/unraid-api/cli/sso/remove-sso-user.command.js';
|
||||
import { RemoveSSOUserQuestionSet } from '@app/unraid-api/cli/sso/remove-sso-user.questions.js';
|
||||
import { SSOCommand } from '@app/unraid-api/cli/sso/sso.command.js';
|
||||
import { ValidateTokenCommand } from '@app/unraid-api/cli/sso/validate-token.command.js';
|
||||
import { StartCommand } from '@app/unraid-api/cli/start.command.js';
|
||||
@@ -39,7 +33,6 @@ import { StopCommand } from '@app/unraid-api/cli/stop.command.js';
|
||||
import { SwitchEnvCommand } from '@app/unraid-api/cli/switch-env.command.js';
|
||||
import { VersionCommand } from '@app/unraid-api/cli/version.command.js';
|
||||
import { ApiConfigModule } from '@app/unraid-api/config/api-config.module.js';
|
||||
import { LegacyConfigModule } from '@app/unraid-api/config/legacy-config.module.js';
|
||||
import { GlobalDepsModule } from '@app/unraid-api/plugin/global-deps.module.js';
|
||||
import { PluginCliModule } from '@app/unraid-api/plugin/plugin.module.js';
|
||||
|
||||
@@ -56,12 +49,9 @@ const DEFAULT_COMMANDS = [
|
||||
StartCommand,
|
||||
StatusCommand,
|
||||
StopCommand,
|
||||
// SSO commands
|
||||
// SSO commands (validation only)
|
||||
SSOCommand,
|
||||
ValidateTokenCommand,
|
||||
AddSSOUserCommand,
|
||||
RemoveSSOUserCommand,
|
||||
ListSSOUserCommand,
|
||||
// Plugin commands
|
||||
PluginCommand,
|
||||
ListPluginCommand,
|
||||
@@ -72,15 +62,12 @@ const DEFAULT_COMMANDS = [
|
||||
const DEFAULT_PROVIDERS = [
|
||||
AddApiKeyQuestionSet,
|
||||
DeleteApiKeyQuestionSet,
|
||||
AddSSOUserQuestionSet,
|
||||
RemoveSSOUserQuestionSet,
|
||||
RemovePluginQuestionSet,
|
||||
DeveloperQuestions,
|
||||
DeveloperToolsService,
|
||||
LogService,
|
||||
PM2Service,
|
||||
ApiKeyService,
|
||||
SsoUserService,
|
||||
DependencyService,
|
||||
AdminKeyService,
|
||||
ApiReportService,
|
||||
|
||||
@@ -20,9 +20,10 @@ type Documents = {
|
||||
"\n mutation UpdateSandboxSettings($input: JSON!) {\n updateSettings(input: $input) {\n restartRequired\n values\n }\n }\n": typeof types.UpdateSandboxSettingsDocument,
|
||||
"\n query GetPlugins {\n plugins {\n name\n version\n hasApiModule\n hasCliModule\n }\n }\n": typeof types.GetPluginsDocument,
|
||||
"\n query GetSSOUsers {\n settings {\n api {\n ssoSubIds\n }\n }\n }\n": typeof types.GetSsoUsersDocument,
|
||||
"\n query SystemReport {\n info {\n id\n machineId\n system {\n manufacturer\n model\n version\n sku\n serial\n uuid\n }\n versions {\n unraid\n kernel\n openssl\n }\n }\n config {\n id\n valid\n error\n }\n server {\n id\n name\n }\n }\n": typeof types.SystemReportDocument,
|
||||
"\n query SystemReport {\n info {\n id\n machineId\n system {\n manufacturer\n model\n version\n sku\n serial\n uuid\n }\n versions {\n core {\n unraid\n kernel\n }\n packages {\n openssl\n }\n }\n }\n config {\n id\n valid\n error\n }\n server {\n id\n name\n }\n }\n": typeof types.SystemReportDocument,
|
||||
"\n query ConnectStatus {\n connect {\n id\n dynamicRemoteAccess {\n enabledType\n runningType\n error\n }\n }\n }\n": typeof types.ConnectStatusDocument,
|
||||
"\n query Services {\n services {\n id\n name\n online\n uptime {\n timestamp\n }\n version\n }\n }\n": typeof types.ServicesDocument,
|
||||
"\n query ValidateOidcSession($token: String!) {\n validateOidcSession(token: $token) {\n valid\n username\n }\n }\n": typeof types.ValidateOidcSessionDocument,
|
||||
};
|
||||
const documents: Documents = {
|
||||
"\n mutation AddPlugin($input: PluginManagementInput!) {\n addPlugin(input: $input)\n }\n": types.AddPluginDocument,
|
||||
@@ -31,9 +32,10 @@ const documents: Documents = {
|
||||
"\n mutation UpdateSandboxSettings($input: JSON!) {\n updateSettings(input: $input) {\n restartRequired\n values\n }\n }\n": types.UpdateSandboxSettingsDocument,
|
||||
"\n query GetPlugins {\n plugins {\n name\n version\n hasApiModule\n hasCliModule\n }\n }\n": types.GetPluginsDocument,
|
||||
"\n query GetSSOUsers {\n settings {\n api {\n ssoSubIds\n }\n }\n }\n": types.GetSsoUsersDocument,
|
||||
"\n query SystemReport {\n info {\n id\n machineId\n system {\n manufacturer\n model\n version\n sku\n serial\n uuid\n }\n versions {\n unraid\n kernel\n openssl\n }\n }\n config {\n id\n valid\n error\n }\n server {\n id\n name\n }\n }\n": types.SystemReportDocument,
|
||||
"\n query SystemReport {\n info {\n id\n machineId\n system {\n manufacturer\n model\n version\n sku\n serial\n uuid\n }\n versions {\n core {\n unraid\n kernel\n }\n packages {\n openssl\n }\n }\n }\n config {\n id\n valid\n error\n }\n server {\n id\n name\n }\n }\n": types.SystemReportDocument,
|
||||
"\n query ConnectStatus {\n connect {\n id\n dynamicRemoteAccess {\n enabledType\n runningType\n error\n }\n }\n }\n": types.ConnectStatusDocument,
|
||||
"\n query Services {\n services {\n id\n name\n online\n uptime {\n timestamp\n }\n version\n }\n }\n": types.ServicesDocument,
|
||||
"\n query ValidateOidcSession($token: String!) {\n validateOidcSession(token: $token) {\n valid\n username\n }\n }\n": types.ValidateOidcSessionDocument,
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -77,7 +79,7 @@ export function gql(source: "\n query GetSSOUsers {\n settings {\n
|
||||
/**
|
||||
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
export function gql(source: "\n query SystemReport {\n info {\n id\n machineId\n system {\n manufacturer\n model\n version\n sku\n serial\n uuid\n }\n versions {\n unraid\n kernel\n openssl\n }\n }\n config {\n id\n valid\n error\n }\n server {\n id\n name\n }\n }\n"): (typeof documents)["\n query SystemReport {\n info {\n id\n machineId\n system {\n manufacturer\n model\n version\n sku\n serial\n uuid\n }\n versions {\n unraid\n kernel\n openssl\n }\n }\n config {\n id\n valid\n error\n }\n server {\n id\n name\n }\n }\n"];
|
||||
export function gql(source: "\n query SystemReport {\n info {\n id\n machineId\n system {\n manufacturer\n model\n version\n sku\n serial\n uuid\n }\n versions {\n core {\n unraid\n kernel\n }\n packages {\n openssl\n }\n }\n }\n config {\n id\n valid\n error\n }\n server {\n id\n name\n }\n }\n"): (typeof documents)["\n query SystemReport {\n info {\n id\n machineId\n system {\n manufacturer\n model\n version\n sku\n serial\n uuid\n }\n versions {\n core {\n unraid\n kernel\n }\n packages {\n openssl\n }\n }\n }\n config {\n id\n valid\n error\n }\n server {\n id\n name\n }\n }\n"];
|
||||
/**
|
||||
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
@@ -86,6 +88,10 @@ export function gql(source: "\n query ConnectStatus {\n connect {\n
|
||||
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
export function gql(source: "\n query Services {\n services {\n id\n name\n online\n uptime {\n timestamp\n }\n version\n }\n }\n"): (typeof documents)["\n query Services {\n services {\n id\n name\n online\n uptime {\n timestamp\n }\n version\n }\n }\n"];
|
||||
/**
|
||||
* The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
export function gql(source: "\n query ValidateOidcSession($token: String!) {\n validateOidcSession(token: $token) {\n valid\n username\n }\n }\n"): (typeof documents)["\n query ValidateOidcSession($token: String!) {\n validateOidcSession(token: $token) {\n valid\n username\n }\n }\n"];
|
||||
|
||||
export function gql(source: string) {
|
||||
return (documents as any)[source] ?? {};
|
||||
|
||||
(File diff suppressed because it is too large.)

api/src/unraid-api/cli/internal-client.service.spec.ts (new file, 203 lines)
@@ -0,0 +1,203 @@
|
||||
import { ConfigModule, ConfigService } from '@nestjs/config';
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import type { InternalGraphQLClientFactory } from '@unraid/shared';
|
||||
import { ApolloClient } from '@apollo/client/core/index.js';
|
||||
import { INTERNAL_CLIENT_SERVICE_TOKEN } from '@unraid/shared';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { AdminKeyService } from '@app/unraid-api/cli/admin-key.service.js';
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
|
||||
describe('CliInternalClientService', () => {
|
||||
let service: CliInternalClientService;
|
||||
let clientFactory: InternalGraphQLClientFactory;
|
||||
let adminKeyService: AdminKeyService;
|
||||
let module: TestingModule;
|
||||
|
||||
const mockApolloClient = {
|
||||
query: vi.fn(),
|
||||
mutate: vi.fn(),
|
||||
stop: vi.fn(),
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
module = await Test.createTestingModule({
|
||||
imports: [ConfigModule.forRoot()],
|
||||
providers: [
|
||||
CliInternalClientService,
|
||||
{
|
||||
provide: INTERNAL_CLIENT_SERVICE_TOKEN,
|
||||
useValue: {
|
||||
createClient: vi.fn().mockResolvedValue(mockApolloClient),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: AdminKeyService,
|
||||
useValue: {
|
||||
getOrCreateLocalAdminKey: vi.fn().mockResolvedValue('test-admin-key'),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<CliInternalClientService>(CliInternalClientService);
|
||||
clientFactory = module.get<InternalGraphQLClientFactory>(INTERNAL_CLIENT_SERVICE_TOKEN);
|
||||
adminKeyService = module.get<AdminKeyService>(AdminKeyService);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await module?.close();
|
||||
});
|
||||
|
||||
it('should be defined', () => {
|
||||
expect(service).toBeDefined();
|
||||
});
|
||||
|
||||
describe('dependency injection', () => {
|
||||
it('should have InternalGraphQLClientFactory injected', () => {
|
||||
expect(clientFactory).toBeDefined();
|
||||
expect(clientFactory.createClient).toBeDefined();
|
||||
});
|
||||
|
||||
it('should have AdminKeyService injected', () => {
|
||||
expect(adminKeyService).toBeDefined();
|
||||
expect(adminKeyService.getOrCreateLocalAdminKey).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getClient', () => {
|
||||
it('should create a client with getApiKey function', async () => {
|
||||
const client = await service.getClient();
|
||||
|
||||
// The API key is now fetched lazily, not immediately
|
||||
expect(clientFactory.createClient).toHaveBeenCalledWith({
|
||||
getApiKey: expect.any(Function),
|
||||
enableSubscriptions: false,
|
||||
});
|
||||
|
||||
// Verify the getApiKey function works correctly when called
|
||||
const callArgs = vi.mocked(clientFactory.createClient).mock.calls[0][0];
|
||||
const apiKey = await callArgs.getApiKey();
|
||||
expect(apiKey).toBe('test-admin-key');
|
||||
expect(adminKeyService.getOrCreateLocalAdminKey).toHaveBeenCalled();
|
||||
|
||||
expect(client).toBe(mockApolloClient);
|
||||
});
|
||||
|
||||
it('should return cached client on subsequent calls', async () => {
|
||||
const client1 = await service.getClient();
|
||||
const client2 = await service.getClient();
|
||||
|
||||
expect(client1).toBe(client2);
|
||||
expect(clientFactory.createClient).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should handle errors when getting admin key', async () => {
|
||||
const error = new Error('Failed to get admin key');
|
||||
vi.mocked(adminKeyService.getOrCreateLocalAdminKey).mockRejectedValueOnce(error);
|
||||
|
||||
// The client creation will succeed, but the API key error happens later
|
||||
const client = await service.getClient();
|
||||
expect(client).toBe(mockApolloClient);
|
||||
|
||||
// Now test that the getApiKey function throws the expected error
|
||||
const callArgs = vi.mocked(clientFactory.createClient).mock.calls[0][0];
|
||||
await expect(callArgs.getApiKey()).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('clearClient', () => {
|
||||
it('should stop and clear the client', async () => {
|
||||
// First create a client
|
||||
await service.getClient();
|
||||
|
||||
// Clear the client
|
||||
service.clearClient();
|
||||
|
||||
expect(mockApolloClient.stop).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle clearing when no client exists', () => {
|
||||
// Should not throw when clearing a non-existent client
|
||||
expect(() => service.clearClient()).not.toThrow();
|
||||
});
|
||||
|
||||
it('should create a new client after clearing', async () => {
|
||||
// Create initial client
|
||||
await service.getClient();
|
||||
|
||||
// Clear it
|
||||
service.clearClient();
|
||||
|
||||
// Create new client
|
||||
await service.getClient();
|
||||
|
||||
// Should have created client twice
|
||||
expect(clientFactory.createClient).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('race condition protection', () => {
|
||||
it('should prevent stale client resurrection when clearClient() is called during creation', async () => {
|
||||
let resolveClientCreation!: (client: any) => void;
|
||||
|
||||
// Mock createClient to return a controllable promise
|
||||
const clientCreationPromise = new Promise<any>((resolve) => {
|
||||
resolveClientCreation = resolve;
|
||||
});
|
||||
vi.mocked(clientFactory.createClient).mockReturnValueOnce(clientCreationPromise);
|
||||
|
||||
// Start client creation (but don't await yet)
|
||||
const getClientPromise = service.getClient();
|
||||
|
||||
// Clear the client while creation is in progress
|
||||
service.clearClient();
|
||||
|
||||
// Now complete the client creation
|
||||
resolveClientCreation(mockApolloClient);
|
||||
|
||||
// Wait for getClient to complete
|
||||
const client = await getClientPromise;
|
||||
|
||||
// The client should be returned from getClient
|
||||
expect(client).toBe(mockApolloClient);
|
||||
|
||||
// But subsequent getClient calls should create a new client
|
||||
// because the race condition protection prevented assignment
|
||||
await service.getClient();
|
||||
|
||||
// Should have created a second client, proving the first wasn't assigned
|
||||
expect(clientFactory.createClient).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('should handle concurrent getClient calls during race condition', async () => {
|
||||
let resolveClientCreation!: (client: any) => void;
|
||||
|
||||
// Mock createClient to return a controllable promise
|
||||
const clientCreationPromise = new Promise<any>((resolve) => {
|
||||
resolveClientCreation = resolve;
|
||||
});
|
||||
vi.mocked(clientFactory.createClient).mockReturnValueOnce(clientCreationPromise);
|
||||
|
||||
// Start multiple concurrent client creation calls
|
||||
const getClientPromise1 = service.getClient();
|
||||
const getClientPromise2 = service.getClient(); // Should wait for first one
|
||||
|
||||
// Clear the client while creation is in progress
|
||||
service.clearClient();
|
||||
|
||||
// Complete the client creation
|
||||
resolveClientCreation(mockApolloClient);
|
||||
|
||||
// Both calls should resolve with the same client
|
||||
const [client1, client2] = await Promise.all([getClientPromise1, getClientPromise2]);
|
||||
expect(client1).toBe(mockApolloClient);
|
||||
expect(client2).toBe(mockApolloClient);
|
||||
|
||||
// But the client should not be cached due to race condition protection
|
||||
await service.getClient();
|
||||
expect(clientFactory.createClient).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,9 +1,8 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { Inject, Injectable, Logger } from '@nestjs/common';
|
||||
|
||||
import { ApolloClient, InMemoryCache, NormalizedCacheObject } from '@apollo/client/core/index.js';
|
||||
import { onError } from '@apollo/client/link/error/index.js';
|
||||
import { HttpLink } from '@apollo/client/link/http/index.js';
|
||||
import type { InternalGraphQLClientFactory } from '@unraid/shared';
|
||||
import { ApolloClient, NormalizedCacheObject } from '@apollo/client/core/index.js';
|
||||
import { INTERNAL_CLIENT_SERVICE_TOKEN } from '@unraid/shared';
|
||||
|
||||
import { AdminKeyService } from '@app/unraid-api/cli/admin-key.service.js';
|
||||
|
||||
@@ -11,51 +10,20 @@ import { AdminKeyService } from '@app/unraid-api/cli/admin-key.service.js';
|
||||
* Internal GraphQL client for CLI commands.
|
||||
*
|
||||
* This service creates an Apollo client that queries the local API server
|
||||
* through IPC, providing access to the same data that external clients would get
|
||||
* but without needing to parse config files directly.
|
||||
* with admin privileges for CLI operations.
|
||||
*/
|
||||
@Injectable()
|
||||
export class CliInternalClientService {
|
||||
private readonly logger = new Logger(CliInternalClientService.name);
|
||||
private client: ApolloClient<NormalizedCacheObject> | null = null;
|
||||
private creatingClient: Promise<ApolloClient<NormalizedCacheObject>> | null = null;
|
||||
|
||||
constructor(
|
||||
private readonly configService: ConfigService,
|
||||
@Inject(INTERNAL_CLIENT_SERVICE_TOKEN)
|
||||
private readonly clientFactory: InternalGraphQLClientFactory,
|
||||
private readonly adminKeyService: AdminKeyService
|
||||
) {}
|
||||
|
||||
private PROD_NGINX_PORT = 80;
|
||||
|
||||
private getNginxPort() {
|
||||
return Number(this.configService.get('store.emhttp.nginx.httpPort', this.PROD_NGINX_PORT));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the port override from the environment variable PORT. e.g. during development.
|
||||
* If the port is a socket port, return undefined.
|
||||
*/
|
||||
private getNonSocketPortOverride() {
|
||||
const port = this.configService.get<string | number | undefined>('PORT');
|
||||
if (!port || port.toString().includes('.sock')) {
|
||||
return undefined;
|
||||
}
|
||||
return Number(port);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the API address for HTTP requests.
|
||||
*/
|
||||
private getApiAddress(port = this.getNginxPort()) {
|
||||
const portOverride = this.getNonSocketPortOverride();
|
||||
if (portOverride) {
|
||||
return `http://127.0.0.1:${portOverride}/graphql`;
|
||||
}
|
||||
if (port !== this.PROD_NGINX_PORT) {
|
||||
return `http://127.0.0.1:${port}/graphql`;
|
||||
}
|
||||
return `http://127.0.0.1/graphql`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the admin API key using the AdminKeyService.
|
||||
* This ensures the key exists and is available for CLI operations.
|
||||
@@ -71,49 +39,59 @@ export class CliInternalClientService {
|
||||
}
|
||||
}
|
||||
|
||||
private async createApiClient(): Promise<ApolloClient<NormalizedCacheObject>> {
|
||||
const httpUri = this.getApiAddress();
|
||||
const apiKey = await this.getLocalApiKey();
|
||||
|
||||
this.logger.debug('Internal GraphQL URL: %s', httpUri);
|
||||
|
||||
const httpLink = new HttpLink({
|
||||
uri: httpUri,
|
||||
fetch,
|
||||
headers: {
|
||||
Origin: '/var/run/unraid-cli.sock',
|
||||
'x-api-key': apiKey,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
const errorLink = onError(({ networkError }) => {
|
||||
if (networkError) {
|
||||
this.logger.warn('[GRAPHQL-CLIENT] NETWORK ERROR ENCOUNTERED %o', networkError);
|
||||
}
|
||||
});
|
||||
|
||||
return new ApolloClient({
|
||||
defaultOptions: {
|
||||
query: {
|
||||
fetchPolicy: 'no-cache',
|
||||
},
|
||||
},
|
||||
cache: new InMemoryCache(),
|
||||
link: errorLink.concat(httpLink),
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the default CLI client with admin API key.
|
||||
* This is for CLI commands that need admin access.
|
||||
*/
|
||||
public async getClient(): Promise<ApolloClient<NormalizedCacheObject>> {
|
||||
// If client already exists, return it
|
||||
if (this.client) {
|
||||
return this.client;
|
||||
}
|
||||
this.client = await this.createApiClient();
|
||||
return this.client;
|
||||
|
||||
// If another call is already creating the client, wait for it
|
||||
if (this.creatingClient) {
|
||||
return await this.creatingClient;
|
||||
}
|
||||
|
||||
// Start creating the client with race condition protection
|
||||
let creationPromise!: Promise<ApolloClient<NormalizedCacheObject>>;
|
||||
// eslint-disable-next-line prefer-const
|
||||
creationPromise = (async () => {
|
||||
try {
|
||||
const client = await this.clientFactory.createClient({
|
||||
getApiKey: () => this.getLocalApiKey(),
|
||||
enableSubscriptions: false, // CLI doesn't need subscriptions
|
||||
});
|
||||
|
||||
// awaiting *before* checking this.creatingClient is important!
|
||||
// by yielding to the event loop, it ensures
|
||||
// `this.creatingClient = creationPromise;` is executed before the next check.
|
||||
|
||||
// This prevents race conditions where the client is assigned to the wrong instance.
|
||||
// Only assign client if this creation is still current
|
||||
if (this.creatingClient === creationPromise) {
|
||||
this.client = client;
|
||||
this.logger.debug('Created CLI internal GraphQL client with admin privileges');
|
||||
}
|
||||
|
||||
return client;
|
||||
} finally {
|
||||
// Only clear if this creation is still current
|
||||
if (this.creatingClient === creationPromise) {
|
||||
this.creatingClient = null;
|
||||
}
|
||||
}
|
||||
})();
|
||||
|
||||
this.creatingClient = creationPromise;
|
||||
return await creationPromise;
|
||||
}
|
||||
|
||||
public clearClient() {
|
||||
// Stop the Apollo client to terminate any active processes
|
||||
this.client?.stop();
|
||||
this.client = null;
|
||||
this.creatingClient = null;
|
||||
}
|
||||
}
|
||||
|
||||
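The getClient() implementation above protects against a clearClient() call racing with in-flight client creation by comparing promise identity before caching the result. The following standalone sketch illustrates only that pattern; the Client type, makeClient factory, and LazyClientHolder name are placeholders for this illustration and are not part of the codebase.

```ts
// Minimal sketch of the promise-identity guard, assuming a generic async factory.
type Client = { stop(): void };

class LazyClientHolder {
    private client: Client | null = null;
    private creating: Promise<Client> | null = null;

    constructor(private readonly makeClient: () => Promise<Client>) {}

    async getClient(): Promise<Client> {
        if (this.client) return this.client;
        if (this.creating) return this.creating;

        let creation!: Promise<Client>;
        // eslint-disable-next-line prefer-const
        creation = (async () => {
            // Awaiting here yields to the event loop, so `this.creating = creation`
            // below runs before the identity checks execute.
            const client = await this.makeClient();
            // Only cache if no clearClient() call invalidated this creation meanwhile.
            if (this.creating === creation) {
                this.client = client;
                this.creating = null;
            }
            return client;
        })();
        this.creating = creation;
        return creation;
    }

    clearClient() {
        this.client?.stop();
        this.client = null;
        this.creating = null;
    }
}
```

With this guard, a clearClient() issued while creation is pending still lets the pending caller receive a client, but the stale instance is never cached, matching the behavior asserted in the race-condition tests above.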
@@ -14,9 +14,13 @@ export const SYSTEM_REPORT_QUERY = gql(`
                uuid
            }
            versions {
                unraid
                kernel
                openssl
                core {
                    unraid
                    kernel
                }
                packages {
                    openssl
                }
            }
        }
        config {

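For consumers of the SystemReport query, the versions payload moves from a flat object to nested core/packages groups. A hypothetical consumer-side illustration (field names are taken from the query above; the `report` variable is assumed to hold the query result):

```ts
// Old shape:  report.info.versions.unraid / .kernel / .openssl
// New shape:  report.info.versions.core.unraid / .core.kernel / .packages.openssl
const unraidVersion = report.info.versions.core.unraid;
const opensslVersion = report.info.versions.packages.openssl;
```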
@@ -0,0 +1,10 @@
import { gql } from '@app/unraid-api/cli/generated/index.js';

export const VALIDATE_OIDC_SESSION_QUERY = gql(`
    query ValidateOidcSession($token: String!) {
        validateOidcSession(token: $token) {
            valid
            username
        }
    }
`);
@@ -1,104 +0,0 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
|
||||
import { CommandRunner, InquirerService, Option, SubCommand } from 'nest-commander';
|
||||
import { v4 } from 'uuid';
|
||||
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { UPDATE_SSO_USERS_MUTATION } from '@app/unraid-api/cli/mutations/update-sso-users.mutation.js';
|
||||
import { SSO_USERS_QUERY } from '@app/unraid-api/cli/queries/sso-users.query.js';
|
||||
import { RestartCommand } from '@app/unraid-api/cli/restart.command.js';
|
||||
import { AddSSOUserQuestionSet } from '@app/unraid-api/cli/sso/add-sso-user.questions.js';
|
||||
|
||||
interface AddSSOUserCommandOptions {
|
||||
disclaimer: string;
|
||||
username: string;
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
@SubCommand({
|
||||
name: 'add-user',
|
||||
aliases: ['add', 'a'],
|
||||
description: 'Add a user for SSO',
|
||||
})
|
||||
export class AddSSOUserCommand extends CommandRunner {
|
||||
constructor(
|
||||
private readonly logger: LogService,
|
||||
private readonly inquirerService: InquirerService,
|
||||
private readonly restartCommand: RestartCommand,
|
||||
private readonly internalClient: CliInternalClientService
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
async run(_input: string[], options?: AddSSOUserCommandOptions): Promise<void> {
|
||||
try {
|
||||
options = await this.inquirerService.prompt(AddSSOUserQuestionSet.name, options);
|
||||
if (options.disclaimer === 'y' && options.username) {
|
||||
const client = await this.internalClient.getClient();
|
||||
|
||||
const result = await client.query({
|
||||
query: SSO_USERS_QUERY,
|
||||
});
|
||||
|
||||
const currentUsers = result.data?.settings?.api?.ssoSubIds || [];
|
||||
|
||||
if (currentUsers.includes(options.username)) {
|
||||
this.logger.error(`User ${options.username} already exists in SSO users`);
|
||||
return;
|
||||
}
|
||||
|
||||
const updatedUsers = [...currentUsers, options.username];
|
||||
|
||||
const mutationResult = await client.mutate({
|
||||
mutation: UPDATE_SSO_USERS_MUTATION,
|
||||
variables: {
|
||||
input: {
|
||||
api: {
|
||||
ssoSubIds: updatedUsers,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
this.logger.info(`User added: ${options.username}`);
|
||||
|
||||
// Check if restart is required based on mutation response
|
||||
if (mutationResult.data?.updateSettings?.restartRequired) {
|
||||
this.logger.info('Restarting the API');
|
||||
await this.restartCommand.run();
|
||||
}
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
this.logger.error('Error adding user:', e);
|
||||
}
|
||||
}
|
||||
|
||||
@Option({
|
||||
flags: '--username <username>',
|
||||
description: 'Cognito Username',
|
||||
})
|
||||
parseUsername(input: string) {
|
||||
if (
|
||||
!/^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/.test(input)
|
||||
) {
|
||||
throw new Error(`Username must be in the format of a UUID (e.g., ${v4()}}\n`);
|
||||
}
|
||||
|
||||
return input;
|
||||
}
|
||||
|
||||
@Option({
|
||||
flags: '--disclaimer <disclaimer>',
|
||||
description: 'Disclaimer (y/n)',
|
||||
})
|
||||
parseDisclaimer(input: string) {
|
||||
if (!input || !['y', 'n'].includes(input.toLowerCase())) {
|
||||
throw new Error('Please answer the diclaimer with (y/n)\n');
|
||||
}
|
||||
if (input.toLowerCase() === 'n') {
|
||||
process.exit(1);
|
||||
}
|
||||
return input;
|
||||
}
|
||||
}
|
||||
@@ -1,56 +0,0 @@
|
||||
import { Question, QuestionSet } from 'nest-commander';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
|
||||
@QuestionSet({ name: 'add-user' })
|
||||
export class AddSSOUserQuestionSet {
|
||||
static name = 'add-user';
|
||||
|
||||
@Question({
|
||||
message: `Enabling Single Sign-On (SSO) will simplify authentication by centralizing access to your Unraid server. However, this comes with certain security considerations: if your SSO account is compromised, unauthorized access to your server could occur.
|
||||
|
||||
Please note: your existing username and password will continue to work alongside SSO. We recommend using 2FA on your Unraid.net account or a single sign-on provider to enhance security.
|
||||
|
||||
Are you sure you want to proceed with adding a user for SSO? (y/n)
|
||||
`,
|
||||
name: 'disclaimer',
|
||||
validate(input) {
|
||||
if (!input) {
|
||||
return 'Please provide a response';
|
||||
}
|
||||
if (!['y', 'n'].includes(input.toLowerCase())) {
|
||||
return 'Please provide a valid response';
|
||||
}
|
||||
if (input.toLowerCase() === 'n') {
|
||||
process.exit(1);
|
||||
}
|
||||
return true;
|
||||
},
|
||||
})
|
||||
parseDisclaimer(val: string) {
|
||||
return val;
|
||||
}
|
||||
|
||||
@Question({
|
||||
message:
|
||||
'What is your Unique Unraid Account ID? Find it in your Unraid Account at https://account.unraid.net/settings\n',
|
||||
name: 'username',
|
||||
validate(input) {
|
||||
if (!input) {
|
||||
return 'Username is required';
|
||||
}
|
||||
const randomUUID = uuidv4();
|
||||
|
||||
if (
|
||||
!/^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/.test(
|
||||
input
|
||||
)
|
||||
) {
|
||||
return `Username must be in the format of a UUID (e.g., ${randomUUID}).`;
|
||||
}
|
||||
return true;
|
||||
},
|
||||
})
|
||||
parseName(val: string) {
|
||||
return val;
|
||||
}
|
||||
}
|
||||
@@ -1,38 +0,0 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
|
||||
import { CommandRunner, SubCommand } from 'nest-commander';
|
||||
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { SSO_USERS_QUERY } from '@app/unraid-api/cli/queries/sso-users.query.js';
|
||||
|
||||
@Injectable()
|
||||
@SubCommand({
|
||||
name: 'list-users',
|
||||
aliases: ['list', 'l'],
|
||||
description: 'List all users for SSO',
|
||||
})
|
||||
export class ListSSOUserCommand extends CommandRunner {
|
||||
constructor(
|
||||
private readonly logger: LogService,
|
||||
private readonly internalClient: CliInternalClientService
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
async run(_input: string[]): Promise<void> {
|
||||
const client = await this.internalClient.getClient();
|
||||
|
||||
const result = await client.query({
|
||||
query: SSO_USERS_QUERY,
|
||||
});
|
||||
|
||||
const users = result.data?.settings?.api?.ssoSubIds || [];
|
||||
|
||||
if (users.length === 0) {
|
||||
this.logger.info('No SSO users found');
|
||||
} else {
|
||||
this.logger.info(users.join('\n'));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,112 +0,0 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
|
||||
import { CommandRunner, InquirerService, Option, SubCommand } from 'nest-commander';
|
||||
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { UPDATE_SSO_USERS_MUTATION } from '@app/unraid-api/cli/mutations/update-sso-users.mutation.js';
|
||||
import { SSO_USERS_QUERY } from '@app/unraid-api/cli/queries/sso-users.query.js';
|
||||
import { RestartCommand } from '@app/unraid-api/cli/restart.command.js';
|
||||
import {
|
||||
NoSSOUsersFoundError,
|
||||
RemoveSSOUserQuestionSet,
|
||||
} from '@app/unraid-api/cli/sso/remove-sso-user.questions.js';
|
||||
|
||||
interface RemoveSSOUserCommandOptions {
|
||||
username: string;
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
@SubCommand({
|
||||
name: 'remove-user',
|
||||
aliases: ['remove', 'r'],
|
||||
description: 'Remove a user (or all users) from SSO',
|
||||
})
|
||||
export class RemoveSSOUserCommand extends CommandRunner {
|
||||
constructor(
|
||||
private readonly logger: LogService,
|
||||
private readonly inquirerService: InquirerService,
|
||||
private readonly restartCommand: RestartCommand,
|
||||
private readonly internalClient: CliInternalClientService
|
||||
) {
|
||||
super();
|
||||
}
|
||||
public async run(_input: string[], options?: RemoveSSOUserCommandOptions): Promise<void> {
|
||||
try {
|
||||
options = await this.inquirerService.prompt(RemoveSSOUserQuestionSet.name, options);
|
||||
} catch (error) {
|
||||
if (error instanceof NoSSOUsersFoundError) {
|
||||
this.logger.error(error.message);
|
||||
process.exit(0);
|
||||
} else if (error instanceof Error) {
|
||||
this.logger.error('Failed to fetch SSO users: %s', error.message);
|
||||
process.exit(1);
|
||||
} else {
|
||||
this.logger.error('An unexpected error occurred');
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
const client = await this.internalClient.getClient();
|
||||
|
||||
const result = await client.query({
|
||||
query: SSO_USERS_QUERY,
|
||||
});
|
||||
|
||||
const currentUsers = result.data?.settings?.api?.ssoSubIds || [];
|
||||
|
||||
if (options.username === 'all') {
|
||||
await client.mutate({
|
||||
mutation: UPDATE_SSO_USERS_MUTATION,
|
||||
variables: {
|
||||
input: {
|
||||
api: {
|
||||
ssoSubIds: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
this.logger.info('All users removed from SSO');
|
||||
} else {
|
||||
const updatedUsers = currentUsers.filter((id: string) => id !== options.username);
|
||||
|
||||
if (updatedUsers.length === currentUsers.length) {
|
||||
this.logger.error(`User ${options.username} not found in SSO users`);
|
||||
return;
|
||||
}
|
||||
|
||||
await client.mutate({
|
||||
mutation: UPDATE_SSO_USERS_MUTATION,
|
||||
variables: {
|
||||
input: {
|
||||
api: {
|
||||
ssoSubIds: updatedUsers,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
this.logger.info('User removed: ' + options.username);
|
||||
}
|
||||
this.logger.info('Restarting the API');
|
||||
await this.restartCommand.run();
|
||||
}
|
||||
|
||||
@Option({
|
||||
name: 'username',
|
||||
flags: '--username <username>',
|
||||
description: 'Cognito Username',
|
||||
})
|
||||
parseUsername(input: string) {
|
||||
if (!input) {
|
||||
throw new Error('Username is required\n');
|
||||
}
|
||||
|
||||
if (
|
||||
!/^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/.test(input)
|
||||
) {
|
||||
throw new Error('Username must be in the format of a UUID (e.g., ${v4()}}\n');
|
||||
}
|
||||
|
||||
return input;
|
||||
}
|
||||
}
|
||||
@@ -1,44 +0,0 @@
|
||||
import { ChoicesFor, Question, QuestionSet } from 'nest-commander';
|
||||
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { SSO_USERS_QUERY } from '@app/unraid-api/cli/queries/sso-users.query.js';
|
||||
|
||||
export class NoSSOUsersFoundError extends Error {
|
||||
constructor() {
|
||||
super('No SSO Users Found');
|
||||
this.name = 'NoSSOUsersFoundError';
|
||||
}
|
||||
}
|
||||
|
||||
@QuestionSet({ name: 'remove-user' })
|
||||
export class RemoveSSOUserQuestionSet {
|
||||
constructor(private readonly internalClient: CliInternalClientService) {}
|
||||
static name = 'remove-user';
|
||||
|
||||
@Question({
|
||||
message: `Please select from the following list of users to remove from SSO, or enter all to remove all users from SSO.\n`,
|
||||
name: 'username',
|
||||
type: 'list',
|
||||
})
|
||||
parseName(val: string) {
|
||||
return val;
|
||||
}
|
||||
|
||||
@ChoicesFor({ name: 'username' })
|
||||
async choicesForUsername() {
|
||||
const client = await this.internalClient.getClient();
|
||||
|
||||
const result = await client.query({
|
||||
query: SSO_USERS_QUERY,
|
||||
});
|
||||
|
||||
const users = result.data?.settings?.api?.ssoSubIds || [];
|
||||
|
||||
if (users.length === 0) {
|
||||
throw new NoSSOUsersFoundError();
|
||||
}
|
||||
|
||||
users.push('all');
|
||||
return users;
|
||||
}
|
||||
}
|
||||
@@ -3,16 +3,13 @@ import { Injectable } from '@nestjs/common';
import { Command, CommandRunner } from 'nest-commander';

import { LogService } from '@app/unraid-api/cli/log.service.js';
import { AddSSOUserCommand } from '@app/unraid-api/cli/sso/add-sso-user.command.js';
import { ListSSOUserCommand } from '@app/unraid-api/cli/sso/list-sso-user.command.js';
import { RemoveSSOUserCommand } from '@app/unraid-api/cli/sso/remove-sso-user.command.js';
import { ValidateTokenCommand } from '@app/unraid-api/cli/sso/validate-token.command.js';

@Injectable()
@Command({
    name: 'sso',
    description: 'Main Command to Configure / Validate SSO Tokens',
    subCommands: [ValidateTokenCommand, AddSSOUserCommand, RemoveSSOUserCommand, ListSSOUserCommand],
    description: 'SSO Token Validation Command',
    subCommands: [ValidateTokenCommand],
})
export class SSOCommand extends CommandRunner {
    constructor(private readonly logger: LogService) {
@@ -20,7 +17,15 @@ export class SSOCommand extends CommandRunner {
    }

    async run(): Promise<void> {
        this.logger.info('Please provide a subcommand or use --help for more information');
        this.logger.info('SSO Token Validation Command');
        this.logger.info('');
        this.logger.info('To configure SSO providers and authorization rules:');
        this.logger.info(' Go to Settings -> Management Access in the WebGUI');
        this.logger.info('');
        this.logger.info('Available subcommands:');
        this.logger.info(' validate-token <token> - Validate an SSO session token');
        this.logger.info('');
        this.logger.info('Use --help for more information');
        process.exit(0);
    }
}

@@ -1,11 +1,8 @@
|
||||
import type { JWTPayload } from 'jose';
|
||||
import { createLocalJWKSet, createRemoteJWKSet, jwtVerify } from 'jose';
|
||||
import { CommandRunner, SubCommand } from 'nest-commander';
|
||||
|
||||
import { JWKS_LOCAL_PAYLOAD, JWKS_REMOTE_LINK } from '@app/consts.js';
|
||||
import { CliInternalClientService } from '@app/unraid-api/cli/internal-client.service.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { SSO_USERS_QUERY } from '@app/unraid-api/cli/queries/sso-users.query.js';
|
||||
import { VALIDATE_OIDC_SESSION_QUERY } from '@app/unraid-api/cli/queries/validate-oidc-session.query.js';
|
||||
|
||||
@SubCommand({
|
||||
name: 'validate-token',
|
||||
@@ -14,15 +11,11 @@ import { SSO_USERS_QUERY } from '@app/unraid-api/cli/queries/sso-users.query.js'
|
||||
arguments: '<token>',
|
||||
})
|
||||
export class ValidateTokenCommand extends CommandRunner {
|
||||
JWKSOffline: ReturnType<typeof createLocalJWKSet>;
|
||||
JWKSOnline: ReturnType<typeof createRemoteJWKSet>;
|
||||
constructor(
|
||||
private readonly logger: LogService,
|
||||
private readonly internalClient: CliInternalClientService
|
||||
) {
|
||||
super();
|
||||
this.JWKSOffline = createLocalJWKSet(JWKS_LOCAL_PAYLOAD);
|
||||
this.JWKSOnline = createRemoteJWKSet(new URL(JWKS_REMOTE_LINK));
|
||||
}
|
||||
|
||||
private createErrorAndExit = (errorMessage: string) => {
|
||||
@@ -46,68 +39,40 @@ export class ValidateTokenCommand extends CommandRunner {
|
||||
this.createErrorAndExit('Invalid token provided');
|
||||
}
|
||||
|
||||
if (!/^[A-Za-z0-9-_]+\.[A-Za-z0-9-_]+\.[A-Za-z0-9-_]+$/.test(token)) {
|
||||
this.createErrorAndExit('Token format is invalid');
|
||||
}
|
||||
// Always validate as OIDC token
|
||||
await this.validateOidcToken(token);
|
||||
}
|
||||
|
||||
let caughtError: null | unknown = null;
|
||||
let tokenPayload: null | JWTPayload = null;
|
||||
private async validateOidcToken(token: string): Promise<void> {
|
||||
try {
|
||||
// this.logger.debug('Attempting to validate token with local key');
|
||||
tokenPayload = (await jwtVerify(token, this.JWKSOffline)).payload;
|
||||
} catch (error: unknown) {
|
||||
try {
|
||||
// this.logger.debug('Local validation failed for key, trying remote validation');
|
||||
tokenPayload = (await jwtVerify(token, this.JWKSOnline)).payload;
|
||||
} catch (error: unknown) {
|
||||
caughtError = error;
|
||||
}
|
||||
}
|
||||
|
||||
if (caughtError) {
|
||||
if (caughtError instanceof Error) {
|
||||
this.createErrorAndExit(`Caught error validating jwt token: ${caughtError.message}`);
|
||||
} else {
|
||||
this.createErrorAndExit('Caught unknown error validating jwt token');
|
||||
}
|
||||
}
|
||||
|
||||
if (tokenPayload === null) {
|
||||
this.createErrorAndExit('No data in JWT to use for user validation');
|
||||
}
|
||||
|
||||
const username = tokenPayload?.sub;
|
||||
|
||||
if (!username) {
|
||||
return this.createErrorAndExit('No ID found in token');
|
||||
}
|
||||
const client = await this.internalClient.getClient();
|
||||
|
||||
let result;
|
||||
try {
|
||||
result = await client.query({
|
||||
query: SSO_USERS_QUERY,
|
||||
const client = await this.internalClient.getClient();
|
||||
const { data, errors } = await client.query({
|
||||
query: VALIDATE_OIDC_SESSION_QUERY,
|
||||
variables: { token },
|
||||
});
|
||||
|
||||
if (errors?.length) {
|
||||
const errorMessages = errors.map((e) => e.message).join(', ');
|
||||
this.createErrorAndExit(`GraphQL errors: ${errorMessages}`);
|
||||
}
|
||||
|
||||
const validation = data?.validateOidcSession;
|
||||
|
||||
if (validation?.valid) {
|
||||
this.logger.always(
|
||||
JSON.stringify({
|
||||
error: null,
|
||||
valid: true,
|
||||
username: validation.username || 'root',
|
||||
})
|
||||
);
|
||||
process.exit(0);
|
||||
} else {
|
||||
this.createErrorAndExit('Invalid OIDC session token');
|
||||
}
|
||||
} catch (error) {
|
||||
this.createErrorAndExit('Failed to query SSO users');
|
||||
}
|
||||
|
||||
if (result.errors && result.errors.length > 0) {
|
||||
this.createErrorAndExit('Failed to retrieve SSO configuration');
|
||||
}
|
||||
|
||||
const ssoUsers = result.data?.settings?.api?.ssoSubIds || [];
|
||||
|
||||
if (ssoUsers.length === 0) {
|
||||
this.createErrorAndExit(
|
||||
'No local user token set to compare to - please set any valid SSO IDs you would like to sign in with'
|
||||
);
|
||||
}
|
||||
if (ssoUsers.includes(username)) {
|
||||
this.logger.always(JSON.stringify({ error: null, valid: true, username }));
|
||||
process.exit(0);
|
||||
} else {
|
||||
this.createErrorAndExit('Username on token does not match');
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
this.createErrorAndExit(`Failed to validate OIDC session: ${errorMessage}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ import { LogRotateService } from '@app/unraid-api/cron/log-rotate.service.js';
import { WriteFlashFileService } from '@app/unraid-api/cron/write-flash-file.service.js';

@Module({
    imports: [ScheduleModule.forRoot()],
    imports: [],
    providers: [WriteFlashFileService, LogRotateService],
})
export class CronModule {}

@@ -12,6 +12,7 @@ import { NoUnusedVariablesRule } from 'graphql';

import { ENVIRONMENT } from '@app/environment.js';
import { ApiConfigModule } from '@app/unraid-api/config/api-config.module.js';
import { createDynamicIntrospectionPlugin } from '@app/unraid-api/graph/introspection-plugin.js';
import { ResolversModule } from '@app/unraid-api/graph/resolvers/resolvers.module.js';
import { createSandboxPlugin } from '@app/unraid-api/graph/sandbox-plugin.js';
import { GlobalDepsModule } from '@app/unraid-api/plugin/global-deps.module.js';
@@ -34,7 +35,7 @@ import { PluginModule } from '@app/unraid-api/plugin/plugin.module.js';
                      path: './generated-schema.graphql',
                  }
                : true,
            introspection: isSandboxEnabled(),
            introspection: true,
            playground: false, // we handle this in the sandbox plugin
            context: async ({ req, connectionParams, extra }) => {
                return {
@@ -43,7 +44,10 @@ import { PluginModule } from '@app/unraid-api/plugin/plugin.module.js';
                    extra,
                };
            },
            plugins: [createSandboxPlugin(isSandboxEnabled)] as any[],
            plugins: [
                createDynamicIntrospectionPlugin(isSandboxEnabled),
                createSandboxPlugin(),
            ] as any[],
            subscriptions: {
                'graphql-ws': {
                    path: '/graphql',

api/src/unraid-api/graph/introspection-plugin.spec.ts (new file, 271 lines)
@@ -0,0 +1,271 @@
|
||||
import { describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { createDynamicIntrospectionPlugin } from '@app/unraid-api/graph/introspection-plugin.js';
|
||||
|
||||
describe('Dynamic Introspection Plugin', () => {
|
||||
const mockResponse = () => ({
|
||||
body: null as any,
|
||||
http: {
|
||||
status: 200,
|
||||
},
|
||||
});
|
||||
|
||||
const runPlugin = async (
|
||||
query: string | undefined,
|
||||
operationName: string | undefined,
|
||||
sandboxEnabled: boolean
|
||||
) => {
|
||||
const isSandboxEnabled = vi.fn().mockReturnValue(sandboxEnabled);
|
||||
const plugin = createDynamicIntrospectionPlugin(isSandboxEnabled);
|
||||
|
||||
const response = mockResponse();
|
||||
const requestContext = {
|
||||
request: {
|
||||
query,
|
||||
operationName,
|
||||
},
|
||||
response,
|
||||
} as any;
|
||||
|
||||
const requestListener = await (plugin as any).requestDidStart();
|
||||
await requestListener.willSendResponse(requestContext);
|
||||
|
||||
return response;
|
||||
};
|
||||
|
||||
describe('when sandbox is enabled', () => {
|
||||
it('should allow introspection query with IntrospectionQuery operation name', async () => {
|
||||
const response = await runPlugin(
|
||||
'query IntrospectionQuery { __schema { queryType { name } } }',
|
||||
'IntrospectionQuery',
|
||||
true
|
||||
);
|
||||
|
||||
expect(response.http.status).toBe(200);
|
||||
expect(response.body).toBeNull();
|
||||
});
|
||||
|
||||
it('should allow direct __schema query', async () => {
|
||||
const response = await runPlugin('{ __schema { queryType { name } } }', undefined, true);
|
||||
|
||||
expect(response.http.status).toBe(200);
|
||||
expect(response.body).toBeNull();
|
||||
});
|
||||
|
||||
it('should allow regular queries with __type field', async () => {
|
||||
const response = await runPlugin(
|
||||
'query GetType { __type(name: "User") { name fields { name } } }',
|
||||
'GetType',
|
||||
true
|
||||
);
|
||||
|
||||
expect(response.http.status).toBe(200);
|
||||
expect(response.body).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('when sandbox is disabled', () => {
|
||||
it('should block introspection query with IntrospectionQuery operation name', async () => {
|
||||
const response = await runPlugin(
|
||||
'query IntrospectionQuery { __schema { queryType { name } } }',
|
||||
'IntrospectionQuery',
|
||||
false
|
||||
);
|
||||
|
||||
expect(response.http.status).toBe(400);
|
||||
expect(response.body).toEqual({
|
||||
kind: 'single',
|
||||
singleResult: {
|
||||
errors: [
|
||||
{
|
||||
message:
|
||||
'GraphQL introspection is not allowed, but the current request is for introspection.',
|
||||
extensions: {
|
||||
code: 'INTROSPECTION_DISABLED',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should block direct __schema query', async () => {
|
||||
const response = await runPlugin('{ __schema { queryType { name } } }', undefined, false);
|
||||
|
||||
expect(response.http.status).toBe(400);
|
||||
expect(response.body?.singleResult?.errors?.[0]?.extensions?.code).toBe(
|
||||
'INTROSPECTION_DISABLED'
|
||||
);
|
||||
});
|
||||
|
||||
it('should block __schema query with whitespace variations', async () => {
|
||||
const queries = [
|
||||
'{__schema{queryType{name}}}',
|
||||
'{ __schema { queryType { name } } }',
|
||||
'{\n __schema\n {\n queryType\n {\n name\n }\n }\n}',
|
||||
'query { __schema { types { name } } }',
|
||||
'query MyQuery { __schema { directives { name } } }',
|
||||
];
|
||||
|
||||
for (const query of queries) {
|
||||
const response = await runPlugin(query, undefined, false);
|
||||
expect(response.http.status).toBe(400);
|
||||
expect(response.body?.singleResult?.errors?.[0]?.extensions?.code).toBe(
|
||||
'INTROSPECTION_DISABLED'
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
it('should allow regular queries without introspection', async () => {
|
||||
const response = await runPlugin(
|
||||
'query GetUser { user(id: "123") { name email } }',
|
||||
'GetUser',
|
||||
false
|
||||
);
|
||||
|
||||
expect(response.http.status).toBe(200);
|
||||
expect(response.body).toBeNull();
|
||||
});
|
||||
|
||||
it('should allow queries with __type field (not full introspection)', async () => {
|
||||
const response = await runPlugin(
|
||||
'query GetType { __type(name: "User") { name fields { name } } }',
|
||||
'GetType',
|
||||
false
|
||||
);
|
||||
|
||||
expect(response.http.status).toBe(200);
|
||||
expect(response.body).toBeNull();
|
||||
});
|
||||
|
||||
it('should allow queries with __typename field', async () => {
|
||||
const response = await runPlugin(
|
||||
'query GetUser { user(id: "123") { __typename name email } }',
|
||||
'GetUser',
|
||||
false
|
||||
);
|
||||
|
||||
expect(response.http.status).toBe(200);
|
||||
expect(response.body).toBeNull();
|
||||
});
|
||||
|
||||
it('should allow mutations', async () => {
|
||||
const response = await runPlugin(
|
||||
'mutation CreateUser($input: UserInput!) { createUser(input: $input) { id name } }',
|
||||
'CreateUser',
|
||||
false
|
||||
);
|
||||
|
||||
expect(response.http.status).toBe(200);
|
||||
expect(response.body).toBeNull();
|
||||
});
|
||||
|
||||
it('should allow subscriptions', async () => {
|
||||
const response = await runPlugin(
|
||||
'subscription OnUserCreated { userCreated { id name } }',
|
||||
'OnUserCreated',
|
||||
false
|
||||
);
|
||||
|
||||
expect(response.http.status).toBe(200);
|
||||
expect(response.body).toBeNull();
|
||||
});
|
||||
|
||||
it('should not block when __schema appears in a string or comment', async () => {
|
||||
const response = await runPlugin(
|
||||
'query GetUser { user(id: "123") { name description } } # __schema is mentioned here',
|
||||
'GetUser',
|
||||
false
|
||||
);
|
||||
|
||||
expect(response.http.status).toBe(200);
|
||||
expect(response.body).toBeNull();
|
||||
});
|
||||
|
||||
it('should handle missing query gracefully', async () => {
|
||||
const response = await runPlugin(undefined, undefined, false);
|
||||
|
||||
expect(response.http.status).toBe(200);
|
||||
expect(response.body).toBeNull();
|
||||
});
|
||||
|
||||
it('should handle empty query gracefully', async () => {
|
||||
const response = await runPlugin('', undefined, false);
|
||||
|
||||
expect(response.http.status).toBe(200);
|
||||
expect(response.body).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle response without http property', async () => {
|
||||
const isSandboxEnabled = vi.fn().mockReturnValue(false);
|
||||
const plugin = createDynamicIntrospectionPlugin(isSandboxEnabled);
|
||||
|
||||
const response = { body: null as any };
|
||||
const requestContext = {
|
||||
request: {
|
||||
query: '{ __schema { queryType { name } } }',
|
||||
operationName: undefined,
|
||||
},
|
||||
response,
|
||||
} as any;
|
||||
|
||||
const requestListener = await (plugin as any).requestDidStart();
|
||||
await requestListener.willSendResponse(requestContext);
|
||||
|
||||
expect(response.body).toEqual({
|
||||
kind: 'single',
|
||||
singleResult: {
|
||||
errors: [
|
||||
{
|
||||
message:
|
||||
'GraphQL introspection is not allowed, but the current request is for introspection.',
|
||||
extensions: {
|
||||
code: 'INTROSPECTION_DISABLED',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
// Should not throw even though response.http doesn't exist
|
||||
});
|
||||
|
||||
it('should check sandbox status dynamically on each request', async () => {
|
||||
const isSandboxEnabled = vi.fn();
|
||||
const plugin = createDynamicIntrospectionPlugin(isSandboxEnabled);
|
||||
|
||||
// First request - sandbox disabled
|
||||
isSandboxEnabled.mockReturnValue(false);
|
||||
const response1 = mockResponse();
|
||||
const requestContext1 = {
|
||||
request: {
|
||||
query: '{ __schema { queryType { name } } }',
|
||||
operationName: undefined,
|
||||
},
|
||||
response: response1,
|
||||
} as any;
|
||||
|
||||
let requestListener = await (plugin as any).requestDidStart();
|
||||
await requestListener.willSendResponse(requestContext1);
|
||||
expect(response1.http.status).toBe(400);
|
||||
|
||||
// Second request - sandbox enabled
|
||||
isSandboxEnabled.mockReturnValue(true);
|
||||
const response2 = mockResponse();
|
||||
const requestContext2 = {
|
||||
request: {
|
||||
query: '{ __schema { queryType { name } } }',
|
||||
operationName: undefined,
|
||||
},
|
||||
response: response2,
|
||||
} as any;
|
||||
|
||||
requestListener = await (plugin as any).requestDidStart();
|
||||
await requestListener.willSendResponse(requestContext2);
|
||||
expect(response2.http.status).toBe(200);
|
||||
|
||||
expect(isSandboxEnabled).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
api/src/unraid-api/graph/introspection-plugin.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
import type { ApolloServerPlugin, GraphQLRequestListener } from '@apollo/server';

export const createDynamicIntrospectionPlugin = (
    isSandboxEnabled: () => boolean
): ApolloServerPlugin => ({
    requestDidStart: async () =>
        ({
            willSendResponse: async (requestContext) => {
                const { request, response } = requestContext;

                // Detect introspection queries:
                // 1. Standard operation name "IntrospectionQuery"
                // 2. Queries containing __schema at root level (main introspection entry point)
                // Note: __type and __typename are also used in regular queries, so we don't block them
                const isIntrospectionRequest =
                    request.operationName === 'IntrospectionQuery' ||
                    (request.query &&
                        // Check for __schema which is the main introspection entry point
                        // Match patterns like: { __schema { ... } } or query { __schema { ... } }
                        /\{\s*__schema\s*[{(]/.test(request.query));

                if (isIntrospectionRequest && !isSandboxEnabled()) {
                    response.body = {
                        kind: 'single',
                        singleResult: {
                            errors: [
                                {
                                    message:
                                        'GraphQL introspection is not allowed, but the current request is for introspection.',
                                    extensions: {
                                        code: 'INTROSPECTION_DISABLED',
                                    },
                                },
                            ],
                        },
                    };
                    if (response.http) {
                        response.http.status = 400;
                    }
                }
            },
        }) satisfies GraphQLRequestListener<any>,
});

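To make the detection rule above concrete, here is a small standalone check that applies the same operation-name test and the same regex; the sample queries are illustrative only and mirror the cases exercised in the spec file.

```ts
// Standalone illustration of the introspection-detection rule used by the plugin above.
const looksLikeIntrospection = (query?: string, operationName?: string): boolean =>
    operationName === 'IntrospectionQuery' ||
    (!!query && /\{\s*__schema\s*[{(]/.test(query));

console.log(looksLikeIntrospection('{ __schema { queryType { name } } }')); // true
console.log(looksLikeIntrospection('query GetType { __type(name: "User") { name } }', 'GetType')); // false
console.log(looksLikeIntrospection('query GetUser { user(id: "1") { __typename name } }', 'GetUser')); // false
```

Because only root-level `__schema` selections and the standard IntrospectionQuery operation name are matched, regular queries that use `__type` or `__typename` pass through, which is exactly what the accompanying tests assert.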
@@ -4,7 +4,7 @@ import { Test } from '@nestjs/testing';
import { beforeEach, describe, expect, it, vi } from 'vitest';

import { DisplayResolver } from '@app/unraid-api/graph/resolvers/display/display.resolver.js';
import { DisplayService } from '@app/unraid-api/graph/resolvers/display/display.service.js';
import { DisplayService } from '@app/unraid-api/graph/resolvers/info/display/display.service.js';

// Mock the pubsub module
vi.mock('@app/core/pubsub.js', () => ({

@@ -8,8 +8,8 @@ import {
} from '@unraid/shared/use-permissions.directive.js';

import { createSubscription, PUBSUB_CHANNEL } from '@app/core/pubsub.js';
import { DisplayService } from '@app/unraid-api/graph/resolvers/display/display.service.js';
import { Display } from '@app/unraid-api/graph/resolvers/info/info.model.js';
import { Display } from '@app/unraid-api/graph/resolvers/info/display/display.model.js';
import { DisplayService } from '@app/unraid-api/graph/resolvers/info/display/display.service.js';

@Resolver(() => Display)
export class DisplayResolver {

@@ -2,10 +2,14 @@ import { Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';

import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';
import { ValidationError } from 'class-validator';

import { AppError } from '@app/core/errors/app-error.js';
import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';
import { OrganizerV1 } from '@app/unraid-api/organizer/organizer.dto.js';
import {
    DEFAULT_ORGANIZER_ROOT_ID,
    DEFAULT_ORGANIZER_VIEW_ID,
} from '@app/unraid-api/organizer/organizer.js';
import { OrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
import { validateOrganizerIntegrity } from '@app/unraid-api/organizer/organizer.validation.js';

@Injectable()
@@ -26,7 +30,21 @@ export class DockerConfigService extends ConfigFilePersister<OrganizerV1> {
        return {
            version: 1,
            resources: {},
            views: {},
            views: {
                default: {
                    id: DEFAULT_ORGANIZER_VIEW_ID,
                    name: 'Default',
                    root: DEFAULT_ORGANIZER_ROOT_ID,
                    entries: {
                        root: {
                            type: 'folder',
                            id: DEFAULT_ORGANIZER_ROOT_ID,
                            name: 'Root',
                            children: [],
                        },
                    },
                },
            },
        };
    }

@@ -34,10 +52,7 @@ export class DockerConfigService extends ConfigFilePersister<OrganizerV1> {
        const organizer = await validateObject(OrganizerV1, config);
        const { isValid, errors } = await validateOrganizerIntegrity(organizer);
        if (!isValid) {
            const error = new ValidationError();
            error.target = organizer;
            error.contexts = errors;
            throw error;
            throw new AppError(`Docker organizer validation failed: ${JSON.stringify(errors, null, 2)}`);
        }
        return organizer;
    }

@@ -0,0 +1,758 @@
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
|
||||
import {
|
||||
containerToResource,
|
||||
DockerOrganizerService,
|
||||
} from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
|
||||
import {
|
||||
ContainerPortType,
|
||||
ContainerState,
|
||||
DockerContainer,
|
||||
} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
|
||||
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
|
||||
import { OrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
|
||||
|
||||
describe('containerToResource', () => {
|
||||
it('should transform a DockerContainer to OrganizerResource', () => {
|
||||
const container: DockerContainer = {
|
||||
id: 'container-123',
|
||||
names: ['/my-app', '/my-app-alias'],
|
||||
image: 'nginx:latest',
|
||||
imageId: 'sha256:abc123',
|
||||
command: 'nginx -g "daemon off;"',
|
||||
created: 1640995200,
|
||||
ports: [
|
||||
{
|
||||
ip: '0.0.0.0',
|
||||
privatePort: 80,
|
||||
publicPort: 8080,
|
||||
type: ContainerPortType.TCP,
|
||||
},
|
||||
],
|
||||
state: ContainerState.RUNNING,
|
||||
status: 'Up 2 hours',
|
||||
autoStart: true,
|
||||
labels: {
|
||||
'com.docker.compose.service': 'web',
|
||||
},
|
||||
};
|
||||
|
||||
const result = containerToResource(container);
|
||||
|
||||
expect(result).toEqual({
|
||||
id: '/my-app',
|
||||
type: 'container',
|
||||
name: '/my-app',
|
||||
meta: container, // Now we store the entire container object
|
||||
});
|
||||
});
|
||||
|
||||
it('should use image as name when names array is empty', () => {
|
||||
const container: DockerContainer = {
|
||||
id: 'container-456',
|
||||
names: [],
|
||||
image: 'redis:alpine',
|
||||
imageId: 'sha256:def456',
|
||||
command: 'redis-server',
|
||||
created: 1640995300,
|
||||
ports: [],
|
||||
state: ContainerState.EXITED,
|
||||
status: 'Exited (0) 1 hour ago',
|
||||
autoStart: false,
|
||||
};
|
||||
|
||||
const result = containerToResource(container);
|
||||
|
||||
expect(result.name).toBe('redis:alpine');
|
||||
expect(result.type).toBe('container');
|
||||
expect(result.id).toBe('redis:alpine');
|
||||
});
|
||||
|
||||
it('should handle containers with minimal data', () => {
|
||||
const container: DockerContainer = {
|
||||
id: 'container-789',
|
||||
names: ['/minimal-container'],
|
||||
image: 'alpine:latest',
|
||||
imageId: 'sha256:ghi789',
|
||||
command: 'sh',
|
||||
created: 1640995400,
|
||||
ports: [],
|
||||
state: ContainerState.EXITED,
|
||||
status: 'Exited (0) 5 minutes ago',
|
||||
autoStart: false,
|
||||
};
|
||||
|
||||
const result = containerToResource(container);
|
||||
|
||||
expect(result).toEqual({
|
||||
id: '/minimal-container',
|
||||
type: 'container',
|
||||
name: '/minimal-container',
|
||||
meta: container, // Now we store the entire container object
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle containers with multiple ports', () => {
|
||||
const container: DockerContainer = {
|
||||
id: 'container-multiport',
|
||||
names: ['/web-app'],
|
||||
image: 'myapp:latest',
|
||||
imageId: 'sha256:jkl012',
|
||||
command: 'npm start',
|
||||
created: 1640995500,
|
||||
ports: [
|
||||
{
|
||||
ip: '0.0.0.0',
|
||||
privatePort: 3000,
|
||||
publicPort: 3000,
|
||||
type: ContainerPortType.TCP,
|
||||
},
|
||||
{
|
||||
ip: '0.0.0.0',
|
||||
privatePort: 3001,
|
||||
publicPort: 3001,
|
||||
type: ContainerPortType.TCP,
|
||||
},
|
||||
],
|
||||
state: ContainerState.RUNNING,
|
||||
status: 'Up 30 minutes',
|
||||
autoStart: true,
|
||||
labels: {
|
||||
maintainer: 'dev-team',
|
||||
version: '1.0.0',
|
||||
},
|
||||
};
|
||||
|
||||
const result = containerToResource(container);
|
||||
|
||||
expect(result.meta?.ports).toHaveLength(2);
|
||||
expect(result.meta?.labels).toEqual({
|
||||
maintainer: 'dev-team',
|
||||
version: '1.0.0',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('DockerOrganizerService', () => {
|
||||
let service: DockerOrganizerService;
|
||||
let configService: DockerConfigService;
|
||||
let dockerService: DockerService;
|
||||
|
||||
const mockOrganizer: OrganizerV1 = {
|
||||
version: 1,
|
||||
resources: {
|
||||
container1: {
|
||||
id: 'container1',
|
||||
type: 'container',
|
||||
name: 'container1',
|
||||
},
|
||||
container2: {
|
||||
id: 'container2',
|
||||
type: 'container',
|
||||
name: 'container2',
|
||||
},
|
||||
},
|
||||
views: {
|
||||
default: {
|
||||
id: 'default',
|
||||
name: 'Default',
|
||||
root: 'root',
|
||||
entries: {
|
||||
root: { id: 'root', type: 'folder', name: 'Root', children: [] },
|
||||
existingFolder: {
|
||||
id: 'existingFolder',
|
||||
type: 'folder',
|
||||
name: 'Existing',
|
||||
children: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
const moduleRef = await Test.createTestingModule({
|
||||
providers: [
|
||||
DockerOrganizerService,
|
||||
{
|
||||
provide: DockerConfigService,
|
||||
useValue: {
|
||||
getConfig: vi.fn().mockImplementation(() => structuredClone(mockOrganizer)),
|
||||
validate: vi.fn().mockImplementation((config) => Promise.resolve(config)),
|
||||
replaceConfig: vi.fn(),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: DockerService,
|
||||
useValue: {
|
||||
getContainers: vi.fn().mockResolvedValue([
|
||||
{
|
||||
id: 'container1',
|
||||
names: ['container1'],
|
||||
image: 'nginx:latest',
|
||||
imageId: 'sha256:123',
|
||||
command: 'nginx',
|
||||
created: 1640995200,
|
||||
ports: [],
|
||||
state: 'running',
|
||||
status: 'Up 1 hour',
|
||||
autoStart: true,
|
||||
},
|
||||
{
|
||||
id: 'container2',
|
||||
names: ['container2'],
|
||||
image: 'redis:latest',
|
||||
imageId: 'sha256:456',
|
||||
command: 'redis-server',
|
||||
created: 1640995300,
|
||||
ports: [],
|
||||
state: 'running',
|
||||
status: 'Up 2 hours',
|
||||
autoStart: true,
|
||||
},
|
||||
]),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = moduleRef.get<DockerOrganizerService>(DockerOrganizerService);
|
||||
configService = moduleRef.get<DockerConfigService>(DockerConfigService);
|
||||
dockerService = moduleRef.get<DockerService>(DockerService);
|
||||
});
|
||||
|
||||
describe('createFolder', () => {
|
||||
it('should create a folder in root by default', async () => {
|
||||
const result = await service.createFolder({ name: 'New Folder' });
|
||||
|
||||
expect(result.version).toBe(1);
|
||||
expect(configService.validate).toHaveBeenCalledWith(expect.any(Object));
|
||||
expect(configService.replaceConfig).toHaveBeenCalledWith(result);
|
||||
|
||||
// Verify folder was created with correct properties
|
||||
const newFolder = Object.values(result.views.default.entries).find(
|
||||
(entry) => entry.type === 'folder' && entry.name === 'New Folder'
|
||||
);
|
||||
expect(newFolder).toBeDefined();
|
||||
});
|
||||
|
||||
it('should create a folder with children', async () => {
|
||||
const result = await service.createFolder({
|
||||
name: 'Folder with Children',
|
||||
parentId: 'root',
|
||||
childrenIds: ['container1', 'container2'],
|
||||
});
|
||||
|
||||
const newFolder = Object.values(result.views.default.entries).find(
|
||||
(entry) => entry.type === 'folder' && entry.name === 'Folder with Children'
|
||||
);
|
||||
expect(newFolder).toBeDefined();
|
||||
expect((newFolder as any).children).toEqual(['container1', 'container2']);
|
||||
});
|
||||
|
||||
it('should throw error if parent does not exist', async () => {
|
||||
await expect(
|
||||
service.createFolder({ name: 'Test', parentId: 'nonexistent' })
|
||||
).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should throw error if parent is not a folder', async () => {
|
||||
const organizerWithRef = structuredClone(mockOrganizer);
|
||||
organizerWithRef.views.default.entries.refEntry = {
|
||||
id: 'refEntry',
|
||||
type: 'ref',
|
||||
target: 'container1',
|
||||
};
|
||||
(configService.getConfig as any).mockReturnValue(organizerWithRef);
|
||||
|
||||
await expect(service.createFolder({ name: 'Test', parentId: 'refEntry' })).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('setFolderChildren', () => {
|
||||
it('should update folder children', async () => {
|
||||
const result = await service.setFolderChildren({
|
||||
folderId: 'existingFolder',
|
||||
childrenIds: ['container1', 'container2'],
|
||||
});
|
||||
|
||||
expect(result.version).toBe(1);
|
||||
expect(configService.validate).toHaveBeenCalledWith(expect.any(Object));
|
||||
expect(configService.replaceConfig).toHaveBeenCalledWith(result);
|
||||
|
||||
// Verify children were set
|
||||
const folder = result.views.default.entries.existingFolder as any;
|
||||
expect(folder.children).toEqual(['container1', 'container2']);
|
||||
});
|
||||
|
||||
it('should create refs for resources not in entries', async () => {
|
||||
const result = await service.setFolderChildren({
|
||||
folderId: 'existingFolder',
|
||||
childrenIds: ['container1'],
|
||||
});
|
||||
|
||||
// Verify ref was created
|
||||
expect(result.views.default.entries.container1).toEqual({
|
||||
id: 'container1',
|
||||
type: 'ref',
|
||||
target: 'container1',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle empty children array', async () => {
|
||||
const result = await service.setFolderChildren({
|
||||
folderId: 'existingFolder',
|
||||
childrenIds: [],
|
||||
});
|
||||
|
||||
const folder = result.views.default.entries.existingFolder as any;
|
||||
expect(folder.children).toEqual([]);
|
||||
});
|
||||
|
||||
it('should use root as default folder', async () => {
|
||||
const result = await service.setFolderChildren({
|
||||
childrenIds: ['existingFolder'],
|
||||
});
|
||||
|
||||
const rootFolder = result.views.default.entries.root as any;
|
||||
expect(rootFolder.children).toContain('existingFolder');
|
||||
});
|
||||
|
||||
it('should throw error if folder does not exist', async () => {
|
||||
await expect(
|
||||
service.setFolderChildren({ folderId: 'nonexistent', childrenIds: [] })
|
||||
).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should throw error if target is not a folder', async () => {
|
||||
const organizerWithRef = structuredClone(mockOrganizer);
|
||||
organizerWithRef.views.default.entries.refEntry = {
|
||||
id: 'refEntry',
|
||||
type: 'ref',
|
||||
target: 'container1',
|
||||
};
|
||||
(configService.getConfig as any).mockReturnValue(organizerWithRef);
|
||||
|
||||
await expect(
|
||||
service.setFolderChildren({ folderId: 'refEntry', childrenIds: [] })
|
||||
).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should throw error if child does not exist', async () => {
|
||||
await expect(
|
||||
service.setFolderChildren({
|
||||
folderId: 'existingFolder',
|
||||
childrenIds: ['nonexistentChild'],
|
||||
})
|
||||
).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteEntries', () => {
|
||||
// Test constants to avoid magic values
|
||||
const TEST_FOLDER_ID = 'testFolder';
|
||||
const TEST_ENTRY_ID = 'testEntry';
|
||||
const PERFORMANCE_TEST_SIZE = 50; // Reduced for faster tests
|
||||
|
||||
// Helper function to create test organizer with specific entries
|
||||
const createTestOrganizer = (entries: Record<string, any> = {}) => {
|
||||
const organizer = structuredClone(mockOrganizer);
|
||||
Object.assign(organizer.views.default.entries, entries);
|
||||
return organizer;
|
||||
};
|
||||
|
||||
// Helper to get typed root folder
|
||||
const getRootFolder = (result: any) => result.views.default.entries.root;
|
||||
|
||||
it('should delete entries and maintain proper orchestration', async () => {
|
||||
const testOrganizer = createTestOrganizer({
|
||||
[TEST_FOLDER_ID]: {
|
||||
id: TEST_FOLDER_ID,
|
||||
type: 'folder',
|
||||
name: 'Test Folder',
|
||||
children: [],
|
||||
},
|
||||
});
|
||||
(configService.getConfig as any).mockReturnValue(testOrganizer);
|
||||
|
||||
const result = await service.deleteEntries({
|
||||
entryIds: new Set([TEST_FOLDER_ID]),
|
||||
});
|
||||
|
||||
// Verify service contract fulfillment
|
||||
expect(result).toBeDefined();
|
||||
expect(result.version).toBe(1);
|
||||
expect(result.views.default).toBeDefined();
|
||||
|
||||
// Verify service orchestration without being overly specific
|
||||
expect(configService.getConfig).toHaveBeenCalled();
|
||||
expect(configService.validate).toHaveBeenCalled();
|
||||
expect(configService.replaceConfig).toHaveBeenCalled();
|
||||
|
||||
// Verify the deletion outcome
|
||||
expect(result.views.default.entries[TEST_FOLDER_ID]).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle empty entryIds set gracefully', async () => {
|
||||
const originalEntryCount = Object.keys(mockOrganizer.views.default.entries).length;
|
||||
|
||||
const result = await service.deleteEntries({
|
||||
entryIds: new Set(),
|
||||
});
|
||||
|
||||
// Verify basic service contract
|
||||
expect(result).toBeDefined();
|
||||
expect(result.version).toBe(1);
|
||||
expect(configService.validate).toHaveBeenCalled();
|
||||
expect(configService.replaceConfig).toHaveBeenCalled();
|
||||
|
||||
// Verify no unintended deletions occurred
|
||||
expect(Object.keys(result.views.default.entries).length).toBeGreaterThanOrEqual(
|
||||
originalEntryCount
|
||||
);
|
||||
expect(result.views.default.entries.existingFolder).toBeDefined();
|
||||
});
|
||||
|
||||
it('should synchronize resources during operation', async () => {
|
||||
const result = await service.deleteEntries({
|
||||
entryIds: new Set(),
|
||||
});
|
||||
|
||||
// Verify resources structure is maintained and updated
|
||||
expect(result.resources).toBeDefined();
|
||||
expect(typeof result.resources).toBe('object');
|
||||
|
||||
// Verify container resources are properly structured
|
||||
const containerResources = Object.values(result.resources).filter(
|
||||
(resource: any) => resource.type === 'container'
|
||||
);
|
||||
expect(containerResources.length).toBeGreaterThan(0);
|
||||
|
||||
// Each container resource should have required properties
|
||||
containerResources.forEach((resource: any) => {
|
||||
expect(resource).toHaveProperty('id');
|
||||
expect(resource).toHaveProperty('type', 'container');
|
||||
expect(resource).toHaveProperty('name');
|
||||
expect(resource).toHaveProperty('meta');
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle deletion of non-existent entries gracefully', async () => {
|
||||
const NON_EXISTENT_ID = 'definitivelyDoesNotExist';
|
||||
const originalEntries = Object.keys(mockOrganizer.views.default.entries);
|
||||
|
||||
const result = await service.deleteEntries({
|
||||
entryIds: new Set([NON_EXISTENT_ID]),
|
||||
});
|
||||
|
||||
// Verify service completed successfully
|
||||
expect(result).toBeDefined();
|
||||
expect(result.version).toBe(1);
|
||||
|
||||
// Verify no existing entries were accidentally deleted
|
||||
originalEntries.forEach((entryId) => {
|
||||
expect(result.views.default.entries[entryId]).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle mixed valid and invalid entry deletion', async () => {
|
||||
const VALID_ENTRY = 'existingFolder';
|
||||
const INVALID_ENTRY = 'nonExistentEntry';
|
||||
|
||||
const result = await service.deleteEntries({
|
||||
entryIds: new Set([VALID_ENTRY, INVALID_ENTRY]),
|
||||
});
|
||||
|
||||
// Verify operation completed successfully despite invalid entry
|
||||
expect(result).toBeDefined();
|
||||
expect(result.version).toBe(1);
|
||||
|
||||
// Valid entry should be deleted, invalid entry should be ignored
|
||||
expect(result.views.default.entries[VALID_ENTRY]).toBeUndefined();
|
||||
expect(result.views.default.entries[INVALID_ENTRY]).toBeUndefined(); // Never existed
|
||||
});
|
||||
|
||||
it('should perform synchronization as part of operation', async () => {
|
||||
const syncSpy = vi.spyOn(service, 'syncAndGetOrganizer');
|
||||
|
||||
const result = await service.deleteEntries({
|
||||
entryIds: new Set(),
|
||||
});
|
||||
|
||||
// Verify sync occurred and result reflects synchronized state
|
||||
expect(syncSpy).toHaveBeenCalled();
|
||||
expect(result.resources).toBeDefined();
|
||||
expect(Object.keys(result.resources).length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('should handle cascading deletions correctly', async () => {
|
||||
const PARENT_FOLDER = 'parentFolder';
|
||||
const CHILD_FOLDER = 'childFolder';
|
||||
|
||||
const hierarchicalOrganizer = createTestOrganizer({
|
||||
[PARENT_FOLDER]: {
|
||||
id: PARENT_FOLDER,
|
||||
type: 'folder',
|
||||
name: 'Parent Folder',
|
||||
children: [CHILD_FOLDER],
|
||||
},
|
||||
[CHILD_FOLDER]: {
|
||||
id: CHILD_FOLDER,
|
||||
type: 'folder',
|
||||
name: 'Child Folder',
|
||||
children: [],
|
||||
},
|
||||
});
|
||||
|
||||
const rootFolder = getRootFolder(hierarchicalOrganizer);
|
||||
rootFolder.children = [PARENT_FOLDER];
|
||||
(configService.getConfig as any).mockReturnValue(hierarchicalOrganizer);
|
||||
|
||||
const result = await service.deleteEntries({
|
||||
entryIds: new Set([PARENT_FOLDER]),
|
||||
});
|
||||
|
||||
// Both parent and child should be deleted due to cascading
|
||||
expect(result.views.default.entries[PARENT_FOLDER]).toBeUndefined();
|
||||
expect(result.views.default.entries[CHILD_FOLDER]).toBeUndefined();
|
||||
|
||||
// Root should no longer reference deleted parent
|
||||
const resultRoot = getRootFolder(result);
|
||||
expect(resultRoot.children).not.toContain(PARENT_FOLDER);
|
||||
});
|
||||
|
||||
it('should handle validation failure appropriately', async () => {
|
||||
const validationError = new Error('Configuration validation failed');
|
||||
(configService.validate as any).mockRejectedValue(validationError);
|
||||
|
||||
await expect(
|
||||
service.deleteEntries({
|
||||
entryIds: new Set([TEST_FOLDER_ID]),
|
||||
})
|
||||
).rejects.toThrow();
|
||||
|
||||
// Should not save invalid configuration
|
||||
expect(configService.replaceConfig).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle docker service failure gracefully', async () => {
|
||||
const dockerError = new Error('Docker service unavailable');
|
||||
(dockerService.getContainers as any).mockRejectedValue(dockerError);
|
||||
|
||||
await expect(
|
||||
service.deleteEntries({
|
||||
entryIds: new Set([TEST_FOLDER_ID]),
|
||||
})
|
||||
).rejects.toThrow();
|
||||
|
||||
// Should fail early before attempting validation/save
|
||||
expect(configService.replaceConfig).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle complex folder hierarchies correctly', async () => {
|
||||
const PARENT_FOLDER = 'parentFolder';
|
||||
const CHILD_FOLDER = 'childFolder';
|
||||
const SIBLING_FOLDER = 'siblingFolder';
|
||||
|
||||
const complexOrganizer = createTestOrganizer({
|
||||
[PARENT_FOLDER]: {
|
||||
id: PARENT_FOLDER,
|
||||
type: 'folder',
|
||||
name: 'Parent Folder',
|
||||
children: ['existingFolder'], // References existing mock entry
|
||||
},
|
||||
[SIBLING_FOLDER]: {
|
||||
id: SIBLING_FOLDER,
|
||||
type: 'folder',
|
||||
name: 'Sibling Folder',
|
||||
children: [],
|
||||
},
|
||||
});
|
||||
|
||||
const rootFolder = getRootFolder(complexOrganizer);
|
||||
rootFolder.children = [PARENT_FOLDER, SIBLING_FOLDER];
|
||||
(configService.getConfig as any).mockReturnValue(complexOrganizer);
|
||||
|
||||
const result = await service.deleteEntries({
|
||||
entryIds: new Set([PARENT_FOLDER]),
|
||||
});
|
||||
|
||||
// Verify targeted deletion occurred
|
||||
expect(result.views.default.entries[PARENT_FOLDER]).toBeUndefined();
|
||||
expect(result.views.default.entries.existingFolder).toBeUndefined(); // Cascaded deletion
|
||||
|
||||
// Verify unrelated entries are preserved
|
||||
expect(result.views.default.entries[SIBLING_FOLDER]).toBeDefined();
|
||||
|
||||
// Verify view structure integrity
|
||||
const resultRoot = getRootFolder(result);
|
||||
expect(resultRoot.children).not.toContain(PARENT_FOLDER);
|
||||
expect(resultRoot.children).toContain(SIBLING_FOLDER);
|
||||
});
|
||||
|
||||
it('should maintain resource integrity after operations', async () => {
|
||||
const result = await service.deleteEntries({
|
||||
entryIds: new Set(['existingFolder']),
|
||||
});
|
||||
|
||||
// Verify resources maintain expected structure and content
|
||||
expect(result.resources).toBeDefined();
|
||||
expect(typeof result.resources).toBe('object');
|
||||
|
||||
// Verify each resource has consistent structure
|
||||
Object.entries(result.resources).forEach(([resourceId, resource]: [string, any]) => {
|
||||
expect(resource).toHaveProperty('id', resourceId);
|
||||
expect(resource).toHaveProperty('type');
|
||||
expect(resource).toHaveProperty('name');
|
||||
|
||||
// Container resources should have metadata
|
||||
if (resource.type === 'container') {
|
||||
expect(resource).toHaveProperty('meta');
|
||||
expect(resource.meta).toBeDefined();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should maintain data consistency throughout operation', async () => {
|
||||
// Test that the service maintains data integrity without testing specific call sequences
|
||||
let configGetCount = 0;
|
||||
let validateCount = 0;
|
||||
let replaceCount = 0;
|
||||
|
||||
(configService.getConfig as any).mockImplementation(() => {
|
||||
configGetCount++;
|
||||
return structuredClone(mockOrganizer);
|
||||
});
|
||||
|
||||
(configService.validate as any).mockImplementation((config: any) => {
|
||||
validateCount++;
|
||||
// Validate that we received a proper config object
|
||||
expect(config).toHaveProperty('version');
|
||||
expect(config).toHaveProperty('resources');
|
||||
expect(config).toHaveProperty('views');
|
||||
return Promise.resolve(config);
|
||||
});
|
||||
|
||||
(configService.replaceConfig as any).mockImplementation((config: any) => {
|
||||
replaceCount++;
|
||||
// Validate that we're saving a consistent config
|
||||
expect(config).toHaveProperty('version');
|
||||
expect(config.views.default).toBeDefined();
|
||||
});
|
||||
|
||||
const result = await service.deleteEntries({
|
||||
entryIds: new Set(['existingFolder']),
|
||||
});
|
||||
|
||||
// Verify essential operations occurred without being overly specific about sequence
|
||||
expect(configGetCount).toBeGreaterThan(0);
|
||||
expect(validateCount).toBeGreaterThan(0);
|
||||
expect(replaceCount).toBeGreaterThan(0);
|
||||
expect(result).toBeDefined();
|
||||
});
|
||||
|
||||
it('should handle deletion when default view is missing', async () => {
|
||||
const organizerWithoutDefaultView = structuredClone(mockOrganizer);
|
||||
delete organizerWithoutDefaultView.views.default;
|
||||
(configService.getConfig as any).mockReturnValue(organizerWithoutDefaultView);
|
||||
|
||||
const result = await service.deleteEntries({
|
||||
entryIds: new Set(['someEntry']),
|
||||
});
|
||||
|
||||
// Should still work and create/maintain proper structure
|
||||
expect(result.views.default).toBeDefined();
|
||||
expect(configService.validate).toHaveBeenCalled();
|
||||
expect(configService.replaceConfig).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should maintain relative order of remaining entries', async () => {
|
||||
const ENTRIES = ['entryA', 'entryB', 'entryC', 'entryD'];
|
||||
const TO_DELETE = ['entryB', 'entryD'];
|
||||
const EXPECTED_REMAINING = ['entryA', 'entryC'];
|
||||
|
||||
const organizerWithOrdering = createTestOrganizer();
|
||||
const rootFolder = getRootFolder(organizerWithOrdering);
|
||||
rootFolder.children = [...ENTRIES];
|
||||
|
||||
// Create the test entries
|
||||
ENTRIES.forEach((entryId) => {
|
||||
organizerWithOrdering.views.default.entries[entryId] = {
|
||||
id: entryId,
|
||||
type: 'ref',
|
||||
target: `target_${entryId}`,
|
||||
};
|
||||
});
|
||||
|
||||
(configService.getConfig as any).mockReturnValue(organizerWithOrdering);
|
||||
|
||||
const result = await service.deleteEntries({
|
||||
entryIds: new Set(TO_DELETE),
|
||||
});
|
||||
|
||||
const resultRoot = getRootFolder(result);
|
||||
|
||||
// Verify deleted entries are gone
|
||||
TO_DELETE.forEach((entryId) => {
|
||||
expect(result.views.default.entries[entryId]).toBeUndefined();
|
||||
expect(resultRoot.children).not.toContain(entryId);
|
||||
});
|
||||
|
||||
// Verify remaining entries are present and in relative order
|
||||
EXPECTED_REMAINING.forEach((entryId) => {
|
||||
expect(result.views.default.entries[entryId]).toBeDefined();
|
||||
expect(resultRoot.children).toContain(entryId);
|
||||
});
|
||||
|
||||
// Check that relative order is preserved among remaining entries
|
||||
const remainingPositions = EXPECTED_REMAINING.map((id) => resultRoot.children.indexOf(id));
|
||||
expect(remainingPositions[0]).toBeLessThan(remainingPositions[1]); // entryA before entryC
|
||||
});
|
||||
|
||||
it('should handle bulk operations efficiently', async () => {
|
||||
const bulkOrganizer = createTestOrganizer();
|
||||
const entriesToDelete = new Set<string>();
|
||||
|
||||
// Create test entries for bulk deletion
|
||||
for (let i = 0; i < PERFORMANCE_TEST_SIZE; i++) {
|
||||
const entryId = `bulkEntry${i}`;
|
||||
entriesToDelete.add(entryId);
|
||||
bulkOrganizer.views.default.entries[entryId] = {
|
||||
id: entryId,
|
||||
type: 'ref',
|
||||
target: `bulkTarget${i}`,
|
||||
};
|
||||
}
|
||||
|
||||
const rootFolder = getRootFolder(bulkOrganizer);
|
||||
rootFolder.children.push(...Array.from(entriesToDelete));
|
||||
(configService.getConfig as any).mockReturnValue(bulkOrganizer);
|
||||
|
||||
const startTime = Date.now();
|
||||
const result = await service.deleteEntries({
|
||||
entryIds: entriesToDelete,
|
||||
});
|
||||
const endTime = Date.now();
|
||||
|
||||
// Verify all bulk entries were deleted
|
||||
entriesToDelete.forEach((entryId) => {
|
||||
expect(result.views.default.entries[entryId]).toBeUndefined();
|
||||
});
|
||||
|
||||
const resultRoot = getRootFolder(result);
|
||||
entriesToDelete.forEach((entryId) => {
|
||||
expect(resultRoot.children).not.toContain(entryId);
|
||||
});
|
||||
|
||||
// Verify operation completed in reasonable time (not a strict performance test)
|
||||
expect(endTime - startTime).toBeLessThan(5000); // 5 seconds should be more than enough
|
||||
|
||||
// Verify service contract still fulfilled
|
||||
expect(result).toBeDefined();
|
||||
expect(result.version).toBe(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,225 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
|
||||
import type { ContainerListOptions } from 'dockerode';
|
||||
|
||||
import { AppError } from '@app/core/errors/app-error.js';
|
||||
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
|
||||
import { DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
|
||||
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
|
||||
import {
|
||||
addMissingResourcesToView,
|
||||
createFolderInView,
|
||||
DEFAULT_ORGANIZER_ROOT_ID,
|
||||
DEFAULT_ORGANIZER_VIEW_ID,
|
||||
deleteOrganizerEntries,
|
||||
moveEntriesToFolder,
|
||||
resolveOrganizer,
|
||||
setFolderChildrenInView,
|
||||
} from '@app/unraid-api/organizer/organizer.js';
|
||||
import {
|
||||
OrganizerContainerResource,
|
||||
OrganizerV1,
|
||||
ResolvedOrganizerV1,
|
||||
} from '@app/unraid-api/organizer/organizer.model.js';
|
||||
|
||||
export function containerToResource(container: DockerContainer): OrganizerContainerResource {
|
||||
const stableRef = container.names[0] || container.image;
|
||||
return {
|
||||
id: stableRef,
|
||||
type: 'container',
|
||||
name: stableRef,
|
||||
meta: container,
|
||||
};
|
||||
}
|
||||
|
||||
export function containerListToResourcesObject(containers: DockerContainer[]): OrganizerV1['resources'] {
|
||||
return containers.reduce(
|
||||
(acc, container) => {
|
||||
const resource = containerToResource(container);
|
||||
acc[resource.id] = resource;
|
||||
return acc;
|
||||
},
|
||||
{} as OrganizerV1['resources']
|
||||
);
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
export class DockerOrganizerService {
|
||||
private readonly logger = new Logger(DockerOrganizerService.name);
|
||||
constructor(
|
||||
private readonly dockerConfigService: DockerConfigService,
|
||||
private readonly dockerService: DockerService
|
||||
) {}
|
||||
|
||||
async getResources(opts?: ContainerListOptions): Promise<OrganizerV1['resources']> {
|
||||
const containers = await this.dockerService.getContainers(opts);
|
||||
return containerListToResourcesObject(containers);
|
||||
}
|
||||
|
||||
async syncDefaultView(
|
||||
organizer: OrganizerV1,
|
||||
resources?: OrganizerV1['resources']
|
||||
): Promise<OrganizerV1> {
|
||||
const newOrganizer = structuredClone(organizer);
|
||||
const view = newOrganizer.views.default ?? {
|
||||
id: DEFAULT_ORGANIZER_VIEW_ID,
|
||||
name: 'Default',
|
||||
root: DEFAULT_ORGANIZER_ROOT_ID,
|
||||
entries: {},
|
||||
};
|
||||
resources ??= await this.getResources();
|
||||
|
||||
const updatedView = addMissingResourcesToView(resources, view);
|
||||
newOrganizer.views.default = updatedView;
|
||||
return newOrganizer;
|
||||
}
|
||||
|
||||
async syncAndGetOrganizer(): Promise<OrganizerV1> {
|
||||
let organizer = this.dockerConfigService.getConfig();
|
||||
organizer.resources = await this.getResources();
|
||||
organizer = await this.syncDefaultView(organizer, organizer.resources);
|
||||
organizer = await this.dockerConfigService.validate(organizer);
|
||||
this.dockerConfigService.replaceConfig(organizer);
|
||||
return organizer;
|
||||
}
|
||||
|
||||
async resolveOrganizer(organizer?: OrganizerV1): Promise<ResolvedOrganizerV1> {
|
||||
organizer ??= await this.syncAndGetOrganizer();
|
||||
return resolveOrganizer(organizer);
|
||||
}
|
||||
|
||||
async createFolder(params: {
|
||||
name: string;
|
||||
parentId?: string;
|
||||
childrenIds?: string[];
|
||||
}): Promise<OrganizerV1> {
|
||||
const { name, parentId = DEFAULT_ORGANIZER_ROOT_ID, childrenIds = [] } = params;
|
||||
|
||||
if (name === DEFAULT_ORGANIZER_ROOT_ID) {
|
||||
throw new AppError(`Folder name '${name}' is reserved`);
|
||||
} else if (name === parentId) {
|
||||
throw new AppError(`Folder ID '${name}' cannot be the same as the parent ID`);
|
||||
} else if (!name) {
|
||||
throw new AppError(`Folder name cannot be empty`);
|
||||
}
|
||||
|
||||
const organizer = await this.syncAndGetOrganizer();
|
||||
// Validate parent exists and is a folder
|
||||
const defaultView = organizer.views.default;
|
||||
if (!defaultView) {
|
||||
throw new AppError('Default view not found');
|
||||
}
|
||||
|
||||
const parentEntry = defaultView.entries[parentId];
|
||||
if (!parentEntry || parentEntry.type !== 'folder') {
|
||||
throw new AppError(`Parent '${parentId}' not found or is not a folder`);
|
||||
}
|
||||
|
||||
// If folder already exists, we don't need to create it
|
||||
if (parentEntry.children.includes(name)) {
|
||||
return organizer;
|
||||
}
|
||||
|
||||
// Use pure function to create folder
|
||||
const updatedView = createFolderInView({
|
||||
view: defaultView,
|
||||
parentId,
|
||||
folderId: name,
|
||||
folderName: name,
|
||||
childrenIds,
|
||||
});
|
||||
|
||||
// Update organizer with new view
|
||||
const newOrganizer = structuredClone(organizer);
|
||||
newOrganizer.views.default = updatedView;
|
||||
|
||||
// Save and return updated organizer
|
||||
const validated = await this.dockerConfigService.validate(newOrganizer);
|
||||
this.dockerConfigService.replaceConfig(validated);
|
||||
return validated;
|
||||
}
|
||||
|
||||
async setFolderChildren(params: { folderId?: string; childrenIds: string[] }): Promise<OrganizerV1> {
|
||||
const { folderId = DEFAULT_ORGANIZER_ROOT_ID, childrenIds } = params;
|
||||
const organizer = await this.syncAndGetOrganizer();
|
||||
|
||||
// Validate view exists
|
||||
const defaultView = organizer.views.default;
|
||||
if (!defaultView) {
|
||||
throw new AppError('Default view not found');
|
||||
}
|
||||
|
||||
// Validate folder exists and is a folder
|
||||
const targetFolder = defaultView.entries[folderId];
|
||||
if (!targetFolder) {
|
||||
throw new AppError(`Folder '${folderId}' not found`);
|
||||
}
|
||||
if (targetFolder.type !== 'folder') {
|
||||
throw new AppError(`Entry '${folderId}' is not a folder`);
|
||||
}
|
||||
|
||||
// Validate all children exist
|
||||
for (const childId of childrenIds) {
|
||||
const childEntry = defaultView.entries[childId];
|
||||
const childResource = organizer.resources[childId];
|
||||
|
||||
if (!childEntry && !childResource) {
|
||||
throw new AppError(`Child '${childId}' not found in entries or resources`);
|
||||
}
|
||||
}
|
||||
|
||||
// Use pure function to update folder children
|
||||
const updatedView = setFolderChildrenInView({
|
||||
view: defaultView,
|
||||
folderId,
|
||||
childrenIds,
|
||||
resources: organizer.resources,
|
||||
});
|
||||
|
||||
// Update organizer with new view
|
||||
const newOrganizer = structuredClone(organizer);
|
||||
newOrganizer.views.default = updatedView;
|
||||
|
||||
// Save and return updated organizer
|
||||
const validated = await this.dockerConfigService.validate(newOrganizer);
|
||||
this.dockerConfigService.replaceConfig(validated);
|
||||
return validated;
|
||||
}
|
||||
|
||||
async deleteEntries(params: { entryIds: Set<string> }): Promise<OrganizerV1> {
|
||||
const { entryIds } = params;
|
||||
const organizer = await this.syncAndGetOrganizer();
|
||||
const newOrganizer = structuredClone(organizer);
|
||||
|
||||
deleteOrganizerEntries(newOrganizer.views.default, entryIds, { mutate: true });
|
||||
addMissingResourcesToView(newOrganizer.resources, newOrganizer.views.default);
|
||||
|
||||
const validated = await this.dockerConfigService.validate(newOrganizer);
|
||||
this.dockerConfigService.replaceConfig(validated);
|
||||
return validated;
|
||||
}
|
||||
|
||||
async moveEntriesToFolder(params: {
|
||||
sourceEntryIds: string[];
|
||||
destinationFolderId: string;
|
||||
}): Promise<OrganizerV1> {
|
||||
const { sourceEntryIds, destinationFolderId } = params;
|
||||
const organizer = await this.syncAndGetOrganizer();
|
||||
const newOrganizer = structuredClone(organizer);
|
||||
|
||||
const defaultView = newOrganizer.views.default;
|
||||
if (!defaultView) {
|
||||
throw new AppError('Default view not found');
|
||||
}
|
||||
|
||||
newOrganizer.views.default = moveEntriesToFolder({
|
||||
view: defaultView,
|
||||
sourceEntryIds: new Set(sourceEntryIds),
|
||||
destinationFolderId,
|
||||
});
|
||||
|
||||
const validated = await this.dockerConfigService.validate(newOrganizer);
|
||||
this.dockerConfigService.replaceConfig(validated);
|
||||
return validated;
|
||||
}
|
||||
}
|
||||
@@ -1,8 +1,11 @@
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import { describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
|
||||
import { DockerEventService } from '@app/unraid-api/graph/resolvers/docker/docker-event.service.js';
|
||||
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
|
||||
import { DockerModule } from '@app/unraid-api/graph/resolvers/docker/docker.module.js';
|
||||
import { DockerMutationsResolver } from '@app/unraid-api/graph/resolvers/docker/docker.mutations.resolver.js';
|
||||
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
|
||||
@@ -15,6 +18,8 @@ describe('DockerModule', () => {
|
||||
})
|
||||
.overrideProvider(DockerService)
|
||||
.useValue({ getDockerClient: vi.fn() })
|
||||
.overrideProvider(DockerConfigService)
|
||||
.useValue({ getConfig: vi.fn() })
|
||||
.compile();
|
||||
|
||||
expect(module).toBeDefined();
|
||||
@@ -52,7 +57,11 @@ describe('DockerModule', () => {
|
||||
|
||||
it('should provide DockerResolver', async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [DockerResolver, { provide: DockerService, useValue: {} }],
|
||||
providers: [
|
||||
DockerResolver,
|
||||
{ provide: DockerService, useValue: {} },
|
||||
{ provide: DockerOrganizerService, useValue: {} },
|
||||
],
|
||||
}).compile();
|
||||
|
||||
const resolver = module.get<DockerResolver>(DockerResolver);
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
|
||||
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
|
||||
import { DockerMutationsResolver } from '@app/unraid-api/graph/resolvers/docker/docker.mutations.resolver.js';
|
||||
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
|
||||
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
|
||||
@@ -8,6 +10,8 @@ import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.ser
|
||||
providers: [
|
||||
// Services
|
||||
DockerService,
|
||||
DockerConfigService,
|
||||
DockerOrganizerService,
|
||||
// DockerEventService,
|
||||
|
||||
// Resolvers
|
||||
|
||||
@@ -3,6 +3,7 @@ import { Test } from '@nestjs/testing';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
|
||||
import { ContainerState, DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
|
||||
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
|
||||
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
|
||||
@@ -22,6 +23,12 @@ describe('DockerResolver', () => {
|
||||
getNetworks: vi.fn(),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: DockerOrganizerService,
|
||||
useValue: {
|
||||
getResolvedOrganizer: vi.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Args, Query, ResolveField, Resolver } from '@nestjs/graphql';
|
||||
import { Args, Mutation, Query, ResolveField, Resolver } from '@nestjs/graphql';
|
||||
|
||||
import { Resource } from '@unraid/shared/graphql.model.js';
|
||||
import {
|
||||
@@ -7,16 +7,22 @@ import {
|
||||
UsePermissions,
|
||||
} from '@unraid/shared/use-permissions.directive.js';
|
||||
|
||||
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
|
||||
import {
|
||||
Docker,
|
||||
DockerContainer,
|
||||
DockerNetwork,
|
||||
} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
|
||||
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
|
||||
import { DEFAULT_ORGANIZER_ROOT_ID } from '@app/unraid-api/organizer/organizer.js';
|
||||
import { OrganizerV1, ResolvedOrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
|
||||
|
||||
@Resolver(() => Docker)
|
||||
export class DockerResolver {
|
||||
constructor(private readonly dockerService: DockerService) {}
|
||||
constructor(
|
||||
private readonly dockerService: DockerService,
|
||||
private readonly dockerOrganizerService: DockerOrganizerService
|
||||
) {}
|
||||
|
||||
@UsePermissions({
|
||||
action: AuthActionVerb.READ,
|
||||
@@ -53,4 +59,80 @@ export class DockerResolver {
|
||||
) {
|
||||
return this.dockerService.getNetworks({ skipCache });
|
||||
}
|
||||
|
||||
@UsePermissions({
|
||||
action: AuthActionVerb.READ,
|
||||
resource: Resource.DOCKER,
|
||||
possession: AuthPossession.ANY,
|
||||
})
|
||||
@ResolveField(() => ResolvedOrganizerV1)
|
||||
public async organizer() {
|
||||
return this.dockerOrganizerService.resolveOrganizer();
|
||||
}
|
||||
|
||||
@UsePermissions({
|
||||
action: AuthActionVerb.UPDATE,
|
||||
resource: Resource.DOCKER,
|
||||
possession: AuthPossession.ANY,
|
||||
})
|
||||
@Mutation(() => ResolvedOrganizerV1)
|
||||
public async createDockerFolder(
|
||||
@Args('name') name: string,
|
||||
@Args('parentId', { nullable: true }) parentId?: string,
|
||||
@Args('childrenIds', { type: () => [String], nullable: true }) childrenIds?: string[]
|
||||
) {
|
||||
const organizer = await this.dockerOrganizerService.createFolder({
|
||||
name,
|
||||
parentId: parentId ?? DEFAULT_ORGANIZER_ROOT_ID,
|
||||
childrenIds: childrenIds ?? [],
|
||||
});
|
||||
return this.dockerOrganizerService.resolveOrganizer(organizer);
|
||||
}
|
||||
|
||||
@UsePermissions({
|
||||
action: AuthActionVerb.UPDATE,
|
||||
resource: Resource.DOCKER,
|
||||
possession: AuthPossession.ANY,
|
||||
})
|
||||
@Mutation(() => ResolvedOrganizerV1)
|
||||
public async setDockerFolderChildren(
|
||||
@Args('folderId', { nullable: true, type: () => String }) folderId: string | undefined,
|
||||
@Args('childrenIds', { type: () => [String] }) childrenIds: string[]
|
||||
) {
|
||||
const organizer = await this.dockerOrganizerService.setFolderChildren({
|
||||
folderId: folderId ?? DEFAULT_ORGANIZER_ROOT_ID,
|
||||
childrenIds,
|
||||
});
|
||||
return this.dockerOrganizerService.resolveOrganizer(organizer);
|
||||
}
|
||||
|
||||
@UsePermissions({
|
||||
action: AuthActionVerb.UPDATE,
|
||||
resource: Resource.DOCKER,
|
||||
possession: AuthPossession.ANY,
|
||||
})
|
||||
@Mutation(() => ResolvedOrganizerV1)
|
||||
public async deleteDockerEntries(@Args('entryIds', { type: () => [String] }) entryIds: string[]) {
|
||||
const organizer = await this.dockerOrganizerService.deleteEntries({
|
||||
entryIds: new Set(entryIds),
|
||||
});
|
||||
return this.dockerOrganizerService.resolveOrganizer(organizer);
|
||||
}
|
||||
|
||||
@UsePermissions({
|
||||
action: AuthActionVerb.UPDATE,
|
||||
resource: Resource.DOCKER,
|
||||
possession: AuthPossession.ANY,
|
||||
})
|
||||
@Mutation(() => ResolvedOrganizerV1)
|
||||
public async moveDockerEntriesToFolder(
|
||||
@Args('sourceEntryIds', { type: () => [String] }) sourceEntryIds: string[],
|
||||
@Args('destinationFolderId') destinationFolderId: string
|
||||
) {
|
||||
const organizer = await this.dockerOrganizerService.moveEntriesToFolder({
|
||||
sourceEntryIds,
|
||||
destinationFolderId,
|
||||
});
|
||||
return this.dockerOrganizerService.resolveOrganizer(organizer);
|
||||
}
|
||||
}
|
||||
|
||||
93
api/src/unraid-api/graph/resolvers/info/cpu/cpu.model.ts
Normal file
93
api/src/unraid-api/graph/resolvers/info/cpu/cpu.model.ts
Normal file
@@ -0,0 +1,93 @@
|
||||
import { Field, Float, Int, ObjectType } from '@nestjs/graphql';
|
||||
|
||||
import { Node } from '@unraid/shared/graphql.model.js';
|
||||
import { GraphQLJSON } from 'graphql-scalars';
|
||||
|
||||
@ObjectType({ description: 'CPU load for a single core' })
|
||||
export class CpuLoad {
|
||||
@Field(() => Float, { description: 'The total CPU load on a single core, in percent.' })
|
||||
percentTotal!: number;
|
||||
|
||||
@Field(() => Float, { description: 'The percentage of time the CPU spent in user space.' })
|
||||
percentUser!: number;
|
||||
|
||||
@Field(() => Float, { description: 'The percentage of time the CPU spent in kernel space.' })
|
||||
percentSystem!: number;
|
||||
|
||||
@Field(() => Float, {
|
||||
description:
|
||||
'The percentage of time the CPU spent on low-priority (niced) user space processes.',
|
||||
})
|
||||
percentNice!: number;
|
||||
|
||||
@Field(() => Float, { description: 'The percentage of time the CPU was idle.' })
|
||||
percentIdle!: number;
|
||||
|
||||
@Field(() => Float, {
|
||||
description: 'The percentage of time the CPU spent servicing hardware interrupts.',
|
||||
})
|
||||
percentIrq!: number;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class CpuUtilization extends Node {
|
||||
@Field(() => Float, { description: 'Total CPU load in percent' })
|
||||
percentTotal!: number;
|
||||
|
||||
@Field(() => [CpuLoad], { description: 'CPU load for each core' })
|
||||
cpus!: CpuLoad[];
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class InfoCpu extends Node {
|
||||
@Field(() => String, { nullable: true, description: 'CPU manufacturer' })
|
||||
manufacturer?: string;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'CPU brand name' })
|
||||
brand?: string;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'CPU vendor' })
|
||||
vendor?: string;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'CPU family' })
|
||||
family?: string;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'CPU model' })
|
||||
model?: string;
|
||||
|
||||
@Field(() => Int, { nullable: true, description: 'CPU stepping' })
|
||||
stepping?: number;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'CPU revision' })
|
||||
revision?: string;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'CPU voltage' })
|
||||
voltage?: string;
|
||||
|
||||
@Field(() => Float, { nullable: true, description: 'Current CPU speed in GHz' })
|
||||
speed?: number;
|
||||
|
||||
@Field(() => Float, { nullable: true, description: 'Minimum CPU speed in GHz' })
|
||||
speedmin?: number;
|
||||
|
||||
@Field(() => Float, { nullable: true, description: 'Maximum CPU speed in GHz' })
|
||||
speedmax?: number;
|
||||
|
||||
@Field(() => Int, { nullable: true, description: 'Number of CPU threads' })
|
||||
threads?: number;
|
||||
|
||||
@Field(() => Int, { nullable: true, description: 'Number of CPU cores' })
|
||||
cores?: number;
|
||||
|
||||
@Field(() => Int, { nullable: true, description: 'Number of physical processors' })
|
||||
processors?: number;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'CPU socket type' })
|
||||
socket?: string;
|
||||
|
||||
@Field(() => GraphQLJSON, { nullable: true, description: 'CPU cache information' })
|
||||
cache?: Record<string, any>;
|
||||
|
||||
@Field(() => [String], { nullable: true, description: 'CPU feature flags' })
|
||||
flags?: string[];
|
||||
}
|
||||
43
api/src/unraid-api/graph/resolvers/info/cpu/cpu.service.ts
Normal file
43
api/src/unraid-api/graph/resolvers/info/cpu/cpu.service.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
|
||||
import { cpu, cpuFlags, currentLoad } from 'systeminformation';
|
||||
|
||||
import { CpuUtilization, InfoCpu } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.model.js';
|
||||
|
||||
@Injectable()
|
||||
export class CpuService {
|
||||
async generateCpu(): Promise<InfoCpu> {
|
||||
const { cores, physicalCores, speedMin, speedMax, stepping, ...rest } = await cpu();
|
||||
const flags = await cpuFlags()
|
||||
.then((flags) => flags.split(' '))
|
||||
.catch(() => []);
|
||||
|
||||
return {
|
||||
id: 'info/cpu',
|
||||
...rest,
|
||||
cores: physicalCores,
|
||||
threads: cores,
|
||||
flags,
|
||||
stepping: Number(stepping),
|
||||
speedmin: speedMin || -1,
|
||||
speedmax: speedMax || -1,
|
||||
};
|
||||
}
|
||||
|
||||
async generateCpuLoad(): Promise<CpuUtilization> {
|
||||
const loadData = await currentLoad();
|
||||
|
||||
return {
|
||||
id: 'info/cpu-load',
|
||||
percentTotal: loadData.currentLoad,
|
||||
cpus: loadData.cpus.map((cpu) => ({
|
||||
percentTotal: cpu.load,
|
||||
percentUser: cpu.loadUser,
|
||||
percentSystem: cpu.loadSystem,
|
||||
percentNice: cpu.loadNice,
|
||||
percentIdle: cpu.loadIdle,
|
||||
percentIrq: cpu.loadIrq,
|
||||
})),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1,24 +0,0 @@
|
||||
import { ResolveField, Resolver } from '@nestjs/graphql';
|
||||
|
||||
import { DevicesService } from '@app/unraid-api/graph/resolvers/info/devices.service.js';
|
||||
import { Devices, Gpu, Pci, Usb } from '@app/unraid-api/graph/resolvers/info/info.model.js';
|
||||
|
||||
@Resolver(() => Devices)
|
||||
export class DevicesResolver {
|
||||
constructor(private readonly devicesService: DevicesService) {}
|
||||
|
||||
@ResolveField(() => [Gpu])
|
||||
public async gpu(): Promise<Gpu[]> {
|
||||
return this.devicesService.generateGpu();
|
||||
}
|
||||
|
||||
@ResolveField(() => [Pci])
|
||||
public async pci(): Promise<Pci[]> {
|
||||
return this.devicesService.generatePci();
|
||||
}
|
||||
|
||||
@ResolveField(() => [Usb])
|
||||
public async usb(): Promise<Usb[]> {
|
||||
return this.devicesService.generateUsb();
|
||||
}
|
||||
}
|
||||
102
api/src/unraid-api/graph/resolvers/info/devices/devices.model.ts
Normal file
102
api/src/unraid-api/graph/resolvers/info/devices/devices.model.ts
Normal file
@@ -0,0 +1,102 @@
|
||||
import { Field, ObjectType } from '@nestjs/graphql';
|
||||
|
||||
import { Node } from '@unraid/shared/graphql.model.js';
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class InfoGpu extends Node {
|
||||
@Field(() => String, { description: 'GPU type/manufacturer' })
|
||||
type!: string;
|
||||
|
||||
@Field(() => String, { description: 'GPU type identifier' })
|
||||
typeid!: string;
|
||||
|
||||
@Field(() => Boolean, { description: 'Whether GPU is blacklisted' })
|
||||
blacklisted!: boolean;
|
||||
|
||||
@Field(() => String, { description: 'Device class' })
|
||||
class!: string;
|
||||
|
||||
@Field(() => String, { description: 'Product ID' })
|
||||
productid!: string;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'Vendor name' })
|
||||
vendorname?: string;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class InfoNetwork extends Node {
|
||||
@Field(() => String, { description: 'Network interface name' })
|
||||
iface!: string;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'Network interface model' })
|
||||
model?: string;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'Network vendor' })
|
||||
vendor?: string;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'MAC address' })
|
||||
mac?: string;
|
||||
|
||||
@Field(() => Boolean, { nullable: true, description: 'Virtual interface flag' })
|
||||
virtual?: boolean;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'Network speed' })
|
||||
speed?: string;
|
||||
|
||||
@Field(() => Boolean, { nullable: true, description: 'DHCP enabled flag' })
|
||||
dhcp?: boolean;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class InfoPci extends Node {
|
||||
@Field(() => String, { description: 'Device type/manufacturer' })
|
||||
type!: string;
|
||||
|
||||
@Field(() => String, { description: 'Type identifier' })
|
||||
typeid!: string;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'Vendor name' })
|
||||
vendorname?: string;
|
||||
|
||||
@Field(() => String, { description: 'Vendor ID' })
|
||||
vendorid!: string;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'Product name' })
|
||||
productname?: string;
|
||||
|
||||
@Field(() => String, { description: 'Product ID' })
|
||||
productid!: string;
|
||||
|
||||
@Field(() => String, { description: 'Blacklisted status' })
|
||||
blacklisted!: string;
|
||||
|
||||
@Field(() => String, { description: 'Device class' })
|
||||
class!: string;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class InfoUsb extends Node {
|
||||
@Field(() => String, { description: 'USB device name' })
|
||||
name!: string;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'USB bus number' })
|
||||
bus?: string;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'USB device number' })
|
||||
device?: string;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class InfoDevices extends Node {
|
||||
@Field(() => [InfoGpu], { nullable: true, description: 'List of GPU devices' })
|
||||
gpu?: InfoGpu[];
|
||||
|
||||
@Field(() => [InfoNetwork], { nullable: true, description: 'List of network interfaces' })
|
||||
network?: InfoNetwork[];
|
||||
|
||||
@Field(() => [InfoPci], { nullable: true, description: 'List of PCI devices' })
|
||||
pci?: InfoPci[];
|
||||
|
||||
@Field(() => [InfoUsb], { nullable: true, description: 'List of USB devices' })
|
||||
usb?: InfoUsb[];
|
||||
}
|
||||
@@ -3,8 +3,8 @@ import { Test } from '@nestjs/testing';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { DevicesResolver } from '@app/unraid-api/graph/resolvers/info/devices.resolver.js';
|
||||
import { DevicesService } from '@app/unraid-api/graph/resolvers/info/devices.service.js';
|
||||
import { DevicesResolver } from '@app/unraid-api/graph/resolvers/info/devices/devices.resolver.js';
|
||||
import { DevicesService } from '@app/unraid-api/graph/resolvers/info/devices/devices.service.js';
|
||||
|
||||
describe('DevicesResolver', () => {
|
||||
let resolver: DevicesResolver;
|
||||
@@ -0,0 +1,35 @@
|
||||
import { ResolveField, Resolver } from '@nestjs/graphql';
|
||||
|
||||
import {
|
||||
InfoDevices,
|
||||
InfoGpu,
|
||||
InfoNetwork,
|
||||
InfoPci,
|
||||
InfoUsb,
|
||||
} from '@app/unraid-api/graph/resolvers/info/devices/devices.model.js';
|
||||
import { DevicesService } from '@app/unraid-api/graph/resolvers/info/devices/devices.service.js';
|
||||
|
||||
@Resolver(() => InfoDevices)
|
||||
export class DevicesResolver {
|
||||
constructor(private readonly devicesService: DevicesService) {}
|
||||
|
||||
@ResolveField(() => [InfoGpu])
|
||||
public async gpu(): Promise<InfoGpu[]> {
|
||||
return this.devicesService.generateGpu();
|
||||
}
|
||||
|
||||
@ResolveField(() => [InfoNetwork])
|
||||
public async network(): Promise<InfoNetwork[]> {
|
||||
return this.devicesService.generateNetwork();
|
||||
}
|
||||
|
||||
@ResolveField(() => [InfoPci])
|
||||
public async pci(): Promise<InfoPci[]> {
|
||||
return this.devicesService.generatePci();
|
||||
}
|
||||
|
||||
@ResolveField(() => [InfoUsb])
|
||||
public async usb(): Promise<InfoUsb[]> {
|
||||
return this.devicesService.generateUsb();
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,7 @@ import { Test } from '@nestjs/testing';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { DevicesService } from '@app/unraid-api/graph/resolvers/info/devices.service.js';
|
||||
import { DevicesService } from '@app/unraid-api/graph/resolvers/info/devices/devices.service.js';
|
||||
|
||||
// Mock external dependencies
|
||||
vi.mock('fs/promises', () => ({
|
||||
@@ -13,24 +13,35 @@ import { filterDevices } from '@app/core/utils/vms/filter-devices.js';
|
||||
import { getPciDevices } from '@app/core/utils/vms/get-pci-devices.js';
|
||||
import { getters } from '@app/store/index.js';
|
||||
import {
|
||||
Gpu,
|
||||
Pci,
|
||||
RawUsbDeviceData,
|
||||
Usb,
|
||||
UsbDevice,
|
||||
} from '@app/unraid-api/graph/resolvers/info/info.model.js';
|
||||
InfoGpu,
|
||||
InfoNetwork,
|
||||
InfoPci,
|
||||
InfoUsb,
|
||||
} from '@app/unraid-api/graph/resolvers/info/devices/devices.model.js';
|
||||
|
||||
interface RawUsbDeviceData {
|
||||
id: string;
|
||||
n?: string;
|
||||
}
|
||||
|
||||
interface UsbDevice {
|
||||
id: string;
|
||||
name: string;
|
||||
guid: string;
|
||||
vendorname?: string;
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
export class DevicesService {
|
||||
private readonly logger = new Logger(DevicesService.name);
|
||||
|
||||
async generateGpu(): Promise<Gpu[]> {
|
||||
async generateGpu(): Promise<InfoGpu[]> {
|
||||
try {
|
||||
const systemPciDevices = await this.getSystemPciDevices();
|
||||
return systemPciDevices
|
||||
.filter((device) => device.class === 'vga' && !device.allowed)
|
||||
.map((entry) => {
|
||||
const gpu: Gpu = {
|
||||
const gpu: InfoGpu = {
|
||||
id: `gpu/${entry.id}`,
|
||||
blacklisted: entry.allowed,
|
||||
class: entry.class,
|
||||
@@ -50,7 +61,7 @@ export class DevicesService {
|
||||
}
|
||||
}
|
||||
|
||||
async generatePci(): Promise<Pci[]> {
|
||||
async generatePci(): Promise<InfoPci[]> {
|
||||
try {
|
||||
const devices = await this.getSystemPciDevices();
|
||||
return devices.map((device) => ({
|
||||
@@ -73,7 +84,21 @@ export class DevicesService {
|
||||
}
|
||||
}
|
||||
|
||||
async generateUsb(): Promise<Usb[]> {
|
||||
async generateNetwork(): Promise<InfoNetwork[]> {
|
||||
try {
|
||||
// For now, return empty array. This can be implemented later to fetch actual network interfaces
|
||||
// using systeminformation or similar libraries
|
||||
return [];
|
||||
} catch (error: unknown) {
|
||||
this.logger.error(
|
||||
`Failed to generate network devices: ${error instanceof Error ? error.message : String(error)}`,
|
||||
error instanceof Error ? error.stack : undefined
|
||||
);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
async generateUsb(): Promise<InfoUsb[]> {
|
||||
try {
|
||||
const usbDevices = await this.getSystemUSBDevices();
|
||||
return usbDevices.map((device) => ({
|
||||
@@ -0,0 +1,82 @@
|
||||
import { Field, Float, Int, ObjectType, registerEnumType } from '@nestjs/graphql';
|
||||
|
||||
import { Node } from '@unraid/shared/graphql.model.js';
|
||||
|
||||
import { ThemeName } from '@app/unraid-api/graph/resolvers/customization/theme.model.js';
|
||||
|
||||
export enum Temperature {
|
||||
CELSIUS = 'C',
|
||||
FAHRENHEIT = 'F',
|
||||
}
|
||||
|
||||
registerEnumType(Temperature, {
|
||||
name: 'Temperature',
|
||||
description: 'Temperature unit',
|
||||
});
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class InfoDisplayCase extends Node {
|
||||
@Field(() => String, { description: 'Case image URL' })
|
||||
url!: string;
|
||||
|
||||
@Field(() => String, { description: 'Case icon identifier' })
|
||||
icon!: string;
|
||||
|
||||
@Field(() => String, { description: 'Error message if any' })
|
||||
error!: string;
|
||||
|
||||
@Field(() => String, { description: 'Base64 encoded case image' })
|
||||
base64!: string;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class InfoDisplay extends Node {
|
||||
@Field(() => InfoDisplayCase, { description: 'Case display configuration' })
|
||||
case!: InfoDisplayCase;
|
||||
|
||||
@Field(() => ThemeName, { description: 'UI theme name' })
|
||||
theme!: ThemeName;
|
||||
|
||||
@Field(() => Temperature, { description: 'Temperature unit (C or F)' })
|
||||
unit!: Temperature;
|
||||
|
||||
@Field(() => Boolean, { description: 'Enable UI scaling' })
|
||||
scale!: boolean;
|
||||
|
||||
@Field(() => Boolean, { description: 'Show tabs in UI' })
|
||||
tabs!: boolean;
|
||||
|
||||
@Field(() => Boolean, { description: 'Enable UI resize' })
|
||||
resize!: boolean;
|
||||
|
||||
@Field(() => Boolean, { description: 'Show WWN identifiers' })
|
||||
wwn!: boolean;
|
||||
|
||||
@Field(() => Boolean, { description: 'Show totals' })
|
||||
total!: boolean;
|
||||
|
||||
@Field(() => Boolean, { description: 'Show usage statistics' })
|
||||
usage!: boolean;
|
||||
|
||||
@Field(() => Boolean, { description: 'Show text labels' })
|
||||
text!: boolean;
|
||||
|
||||
@Field(() => Int, { description: 'Warning temperature threshold' })
|
||||
warning!: number;
|
||||
|
||||
@Field(() => Int, { description: 'Critical temperature threshold' })
|
||||
critical!: number;
|
||||
|
||||
@Field(() => Int, { description: 'Hot temperature threshold' })
|
||||
hot!: number;
|
||||
|
||||
@Field(() => Int, { nullable: true, description: 'Maximum temperature threshold' })
|
||||
max?: number;
|
||||
|
||||
@Field(() => String, { nullable: true, description: 'Locale setting' })
|
||||
locale?: string;
|
||||
}
|
||||
|
||||
// Export aliases for backward compatibility with the main DisplayResolver
|
||||
export { InfoDisplay as Display };
|
||||
export { InfoDisplayCase as DisplayCase };
|
||||
@@ -3,7 +3,7 @@ import { Test } from '@nestjs/testing';

import { beforeEach, describe, expect, it, vi } from 'vitest';

import { DisplayService } from '@app/unraid-api/graph/resolvers/display/display.service.js';
import { DisplayService } from '@app/unraid-api/graph/resolvers/info/display/display.service.js';

// Mock fs/promises at the module level only for specific test cases
vi.mock('node:fs/promises', async () => {
@@ -37,7 +37,7 @@ describe('DisplayService', () => {
        const result = await service.generateDisplay();

        // Verify basic structure
        expect(result).toHaveProperty('id', 'display');
        expect(result).toHaveProperty('id', 'info/display');
        expect(result).toHaveProperty('case');
        expect(result.case).toHaveProperty('url');
        expect(result.case).toHaveProperty('icon');
@@ -69,6 +69,7 @@ describe('DisplayService', () => {
        const result = await service.generateDisplay();

        expect(result.case).toEqual({
            id: 'display/case',
            url: '',
            icon: 'custom',
            error: 'could-not-read-config-file',
@@ -90,7 +91,7 @@ describe('DisplayService', () => {
        const result = await service.generateDisplay();

        // Should still return basic structure even if some config is missing
        expect(result).toHaveProperty('id', 'display');
        expect(result).toHaveProperty('id', 'info/display');
        expect(result).toHaveProperty('case');
        // The actual config depends on what's in the dev files
    });
@@ -114,11 +115,6 @@ describe('DisplayService', () => {
        expect(result.critical).toBe(90);
        expect(result.hot).toBe(45);
        expect(result.max).toBe(55);
        expect(result.date).toBe('%c');
        expect(result.number).toBe('.,');
        expect(result.users).toBe('Tasks:3');
        expect(result.banner).toBe('image');
        expect(result.dashapps).toBe('icons');
        expect(result.locale).toBe('en_US'); // default fallback when not specified
    });

@@ -140,6 +136,7 @@ describe('DisplayService', () => {
        const result = await service.generateDisplay();

        expect(result.case).toEqual({
            id: 'display/case',
            url: '',
            icon: 'default',
            error: '',
@@ -6,19 +6,22 @@ import { type DynamixConfig } from '@app/core/types/ini.js';
import { toBoolean } from '@app/core/utils/casting.js';
import { fileExists } from '@app/core/utils/files/file-exists.js';
import { loadState } from '@app/core/utils/misc/load-state.js';
import { validateEnumValue } from '@app/core/utils/validation/enum-validator.js';
import { getters } from '@app/store/index.js';
import { ThemeName } from '@app/unraid-api/graph/resolvers/customization/theme.model.js';
import { Display, Temperature } from '@app/unraid-api/graph/resolvers/info/info.model.js';
import { Display, Temperature } from '@app/unraid-api/graph/resolvers/info/display/display.model.js';

const states = {
    // Success
    custom: {
        id: 'display/case',
        url: '',
        icon: 'custom',
        error: '',
        base64: '',
    },
    default: {
        id: 'display/case',
        url: '',
        icon: 'default',
        error: '',
@@ -27,30 +30,35 @@ const states = {

    // Errors
    couldNotReadConfigFile: {
        id: 'display/case',
        url: '',
        icon: 'custom',
        error: 'could-not-read-config-file',
        base64: '',
    },
    couldNotReadImage: {
        id: 'display/case',
        url: '',
        icon: 'custom',
        error: 'could-not-read-image',
        base64: '',
    },
    imageMissing: {
        id: 'display/case',
        url: '',
        icon: 'custom',
        error: 'image-missing',
        base64: '',
    },
    imageTooBig: {
        id: 'display/case',
        url: '',
        icon: 'custom',
        error: 'image-too-big',
        base64: '',
    },
    imageCorrupt: {
        id: 'display/case',
        url: '',
        icon: 'custom',
        error: 'image-corrupt',
@@ -67,11 +75,26 @@ export class DisplayService {
        // Get display configuration
        const config = await this.getDisplayConfig();

        return {
            id: 'display',
        const display: Display = {
            id: 'info/display',
            case: caseInfo,
            ...config,
            theme: config.theme ?? ThemeName.white,
            unit: config.unit ?? Temperature.CELSIUS,
            scale: config.scale ?? false,
            tabs: config.tabs ?? true,
            resize: config.resize ?? true,
            wwn: config.wwn ?? false,
            total: config.total ?? true,
            usage: config.usage ?? true,
            text: config.text ?? true,
            warning: config.warning ?? 60,
            critical: config.critical ?? 80,
            hot: config.hot ?? 90,
            max: config.max,
            locale: config.locale,
        };

        return display;
    }

    private async getCaseInfo() {
@@ -102,11 +125,12 @@ export class DisplayService {
        // Non-custom icon
        return {
            ...states.default,
            id: 'display/case',
            icon: serverCase,
        };
    }

    private async getDisplayConfig() {
    private async getDisplayConfig(): Promise<Partial<Omit<Display, 'id' | 'case'>>> {
        const filePaths = getters.paths()['dynamix-config'];

        const state = filePaths.reduce<Partial<DynamixConfig>>((acc, filePath) => {
@@ -122,10 +146,11 @@ export class DisplayService {
        }

        const { theme, unit, ...display } = state.display;

        return {
            ...display,
            theme: theme as ThemeName,
            unit: unit as Temperature,
            theme: validateEnumValue(theme, ThemeName),
            unit: validateEnumValue(unit, Temperature),
            scale: toBoolean(display.scale),
            tabs: toBoolean(display.tabs),
            resize: toBoolean(display.resize),
@@ -1,552 +1,44 @@
|
||||
import {
|
||||
Field,
|
||||
Float,
|
||||
GraphQLISODateTime,
|
||||
ID,
|
||||
Int,
|
||||
ObjectType,
|
||||
registerEnumType,
|
||||
} from '@nestjs/graphql';
|
||||
import { Field, GraphQLISODateTime, ID, ObjectType } from '@nestjs/graphql';
|
||||
|
||||
import { Node } from '@unraid/shared/graphql.model.js';
|
||||
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';
|
||||
import { GraphQLBigInt, GraphQLJSON } from 'graphql-scalars';
|
||||
|
||||
import { ThemeName } from '@app/unraid-api/graph/resolvers/customization/theme.model.js';
|
||||
|
||||
// USB device interface for type safety
|
||||
export interface UsbDevice {
|
||||
id: string;
|
||||
name: string;
|
||||
guid: string;
|
||||
vendorname: string;
|
||||
}
|
||||
|
||||
// Raw USB device data from lsusb parsing
|
||||
export interface RawUsbDeviceData {
|
||||
id: string;
|
||||
n?: string;
|
||||
}
|
||||
|
||||
export enum Temperature {
|
||||
C = 'C',
|
||||
F = 'F',
|
||||
}
|
||||
|
||||
registerEnumType(Temperature, {
|
||||
name: 'Temperature',
|
||||
description: 'Temperature unit (Celsius or Fahrenheit)',
|
||||
});
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class InfoApps extends Node {
|
||||
@Field(() => Int, { description: 'How many docker containers are installed' })
|
||||
installed!: number;
|
||||
|
||||
@Field(() => Int, { description: 'How many docker containers are running' })
|
||||
started!: number;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class Baseboard extends Node {
|
||||
@Field(() => String)
|
||||
manufacturer!: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
model?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
version?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
serial?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
assetTag?: string;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class InfoCpu extends Node {
|
||||
@Field(() => String)
|
||||
manufacturer!: string;
|
||||
|
||||
@Field(() => String)
|
||||
brand!: string;
|
||||
|
||||
@Field(() => String)
|
||||
vendor!: string;
|
||||
|
||||
@Field(() => String)
|
||||
family!: string;
|
||||
|
||||
@Field(() => String)
|
||||
model!: string;
|
||||
|
||||
@Field(() => Int)
|
||||
stepping!: number;
|
||||
|
||||
@Field(() => String)
|
||||
revision!: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
voltage?: string;
|
||||
|
||||
@Field(() => Float)
|
||||
speed!: number;
|
||||
|
||||
@Field(() => Float)
|
||||
speedmin!: number;
|
||||
|
||||
@Field(() => Float)
|
||||
speedmax!: number;
|
||||
|
||||
@Field(() => Int)
|
||||
threads!: number;
|
||||
|
||||
@Field(() => Int)
|
||||
cores!: number;
|
||||
|
||||
@Field(() => Int)
|
||||
processors!: number;
|
||||
|
||||
@Field(() => String)
|
||||
socket!: string;
|
||||
|
||||
@Field(() => GraphQLJSON)
|
||||
cache!: Record<string, any>;
|
||||
|
||||
@Field(() => [String])
|
||||
flags!: string[];
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class Gpu extends Node {
|
||||
@Field(() => String)
|
||||
type!: string;
|
||||
|
||||
@Field(() => String)
|
||||
typeid!: string;
|
||||
|
||||
@Field(() => String)
|
||||
vendorname!: string;
|
||||
|
||||
@Field(() => String)
|
||||
productid!: string;
|
||||
|
||||
@Field(() => Boolean)
|
||||
blacklisted!: boolean;
|
||||
|
||||
@Field(() => String)
|
||||
class!: string;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class Network extends Node {
|
||||
@Field(() => String, { nullable: true })
|
||||
iface?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
ifaceName?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
ipv4?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
ipv6?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
mac?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
internal?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
operstate?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
type?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
duplex?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
mtu?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
speed?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
carrierChanges?: string;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class Pci extends Node {
|
||||
@Field(() => String, { nullable: true })
|
||||
type?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
typeid?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
vendorname?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
vendorid?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
productname?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
productid?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
blacklisted?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
class?: string;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class Usb extends Node {
|
||||
@Field(() => String, { nullable: true })
|
||||
name?: string;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class Devices extends Node {
|
||||
@Field(() => [Gpu])
|
||||
gpu!: Gpu[];
|
||||
|
||||
@Field(() => [Pci])
|
||||
pci!: Pci[];
|
||||
|
||||
@Field(() => [Usb])
|
||||
usb!: Usb[];
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class Case {
|
||||
@Field(() => String, { nullable: true })
|
||||
icon?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
url?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
error?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
base64?: string;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class Display extends Node {
|
||||
@Field(() => Case, { nullable: true })
|
||||
case?: Case;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
date?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
number?: string;
|
||||
|
||||
@Field(() => Boolean, { nullable: true })
|
||||
scale?: boolean;
|
||||
|
||||
@Field(() => Boolean, { nullable: true })
|
||||
tabs?: boolean;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
users?: string;
|
||||
|
||||
@Field(() => Boolean, { nullable: true })
|
||||
resize?: boolean;
|
||||
|
||||
@Field(() => Boolean, { nullable: true })
|
||||
wwn?: boolean;
|
||||
|
||||
@Field(() => Boolean, { nullable: true })
|
||||
total?: boolean;
|
||||
|
||||
@Field(() => Boolean, { nullable: true })
|
||||
usage?: boolean;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
banner?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
dashapps?: string;
|
||||
|
||||
@Field(() => ThemeName, { nullable: true })
|
||||
theme?: ThemeName;
|
||||
|
||||
@Field(() => Boolean, { nullable: true })
|
||||
text?: boolean;
|
||||
|
||||
@Field(() => Temperature, { nullable: true })
|
||||
unit?: Temperature;
|
||||
|
||||
@Field(() => Int, { nullable: true })
|
||||
warning?: number;
|
||||
|
||||
@Field(() => Int, { nullable: true })
|
||||
critical?: number;
|
||||
|
||||
@Field(() => Int, { nullable: true })
|
||||
hot?: number;
|
||||
|
||||
@Field(() => Int, { nullable: true })
|
||||
max?: number;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
locale?: string;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class MemoryLayout extends Node {
|
||||
@Field(() => GraphQLBigInt)
|
||||
size!: number;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
bank?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
type?: string;
|
||||
|
||||
@Field(() => Int, { nullable: true })
|
||||
clockSpeed?: number;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
formFactor?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
manufacturer?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
partNum?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
serialNum?: string;
|
||||
|
||||
@Field(() => Int, { nullable: true })
|
||||
voltageConfigured?: number;
|
||||
|
||||
@Field(() => Int, { nullable: true })
|
||||
voltageMin?: number;
|
||||
|
||||
@Field(() => Int, { nullable: true })
|
||||
voltageMax?: number;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class InfoMemory extends Node {
|
||||
@Field(() => GraphQLBigInt)
|
||||
max!: number;
|
||||
|
||||
@Field(() => GraphQLBigInt)
|
||||
total!: number;
|
||||
|
||||
@Field(() => GraphQLBigInt)
|
||||
free!: number;
|
||||
|
||||
@Field(() => GraphQLBigInt)
|
||||
used!: number;
|
||||
|
||||
@Field(() => GraphQLBigInt)
|
||||
active!: number;
|
||||
|
||||
@Field(() => GraphQLBigInt)
|
||||
available!: number;
|
||||
|
||||
@Field(() => GraphQLBigInt)
|
||||
buffcache!: number;
|
||||
|
||||
@Field(() => GraphQLBigInt)
|
||||
swaptotal!: number;
|
||||
|
||||
@Field(() => GraphQLBigInt)
|
||||
swapused!: number;
|
||||
|
||||
@Field(() => GraphQLBigInt)
|
||||
swapfree!: number;
|
||||
|
||||
@Field(() => [MemoryLayout])
|
||||
layout!: MemoryLayout[];
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class Os extends Node {
|
||||
@Field(() => String, { nullable: true })
|
||||
platform?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
distro?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
release?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
codename?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
kernel?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
arch?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
hostname?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
codepage?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
logofile?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
serial?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
build?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
uptime?: string;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class System extends Node {
|
||||
@Field(() => String, { nullable: true })
|
||||
manufacturer?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
model?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
version?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
serial?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
uuid?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
sku?: string;
|
||||
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class Versions extends Node {
|
||||
@Field(() => String, { nullable: true })
|
||||
kernel?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
openssl?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
systemOpenssl?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
systemOpensslLib?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
node?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
v8?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
npm?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
yarn?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
pm2?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
gulp?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
grunt?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
git?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
tsc?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
mysql?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
redis?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
mongodb?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
apache?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
nginx?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
php?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
docker?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
postfix?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
postgresql?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
perl?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
python?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
gcc?: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
unraid?: string;
|
||||
}
|
||||
import { InfoCpu } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.model.js';
|
||||
import { InfoDevices } from '@app/unraid-api/graph/resolvers/info/devices/devices.model.js';
|
||||
import { InfoDisplay } from '@app/unraid-api/graph/resolvers/info/display/display.model.js';
|
||||
import { InfoMemory } from '@app/unraid-api/graph/resolvers/info/memory/memory.model.js';
|
||||
import { InfoOs } from '@app/unraid-api/graph/resolvers/info/os/os.model.js';
|
||||
import { InfoBaseboard, InfoSystem } from '@app/unraid-api/graph/resolvers/info/system/system.model.js';
|
||||
import { InfoVersions } from '@app/unraid-api/graph/resolvers/info/versions/versions.model.js';
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class Info extends Node {
|
||||
@Field(() => InfoApps, { description: 'Count of docker containers' })
|
||||
apps!: InfoApps;
|
||||
|
||||
@Field(() => Baseboard)
|
||||
baseboard!: Baseboard;
|
||||
|
||||
@Field(() => InfoCpu)
|
||||
cpu!: InfoCpu;
|
||||
|
||||
@Field(() => Devices)
|
||||
devices!: Devices;
|
||||
|
||||
@Field(() => Display)
|
||||
display!: Display;
|
||||
|
||||
@Field(() => PrefixedID, { description: 'Machine ID', nullable: true })
|
||||
machineId?: string;
|
||||
|
||||
@Field(() => InfoMemory)
|
||||
memory!: InfoMemory;
|
||||
|
||||
@Field(() => Os)
|
||||
os!: Os;
|
||||
|
||||
@Field(() => System)
|
||||
system!: System;
|
||||
|
||||
@Field(() => GraphQLISODateTime)
|
||||
@Field(() => GraphQLISODateTime, { description: 'Current server time' })
|
||||
time!: Date;
|
||||
|
||||
@Field(() => Versions)
|
||||
versions!: Versions;
|
||||
@Field(() => InfoBaseboard, { description: 'Motherboard information' })
|
||||
baseboard!: InfoBaseboard;
|
||||
|
||||
@Field(() => InfoCpu, { description: 'CPU information' })
|
||||
cpu!: InfoCpu;
|
||||
|
||||
@Field(() => InfoDevices, { description: 'Device information' })
|
||||
devices!: InfoDevices;
|
||||
|
||||
@Field(() => InfoDisplay, { description: 'Display configuration' })
|
||||
display!: InfoDisplay;
|
||||
|
||||
@Field(() => ID, { nullable: true, description: 'Machine ID' })
|
||||
machineId?: string;
|
||||
|
||||
@Field(() => InfoMemory, { description: 'Memory information' })
|
||||
memory!: InfoMemory;
|
||||
|
||||
@Field(() => InfoOs, { description: 'Operating system information' })
|
||||
os!: InfoOs;
|
||||
|
||||
@Field(() => InfoSystem, { description: 'System information' })
|
||||
system!: InfoSystem;
|
||||
|
||||
@Field(() => InfoVersions, { description: 'Software versions' })
|
||||
versions!: InfoVersions;
|
||||
}
|
||||
|
||||
api/src/unraid-api/graph/resolvers/info/info.module.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
import { Module } from '@nestjs/common';
import { ConfigModule } from '@nestjs/config';

import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service.js';
import { DevicesResolver } from '@app/unraid-api/graph/resolvers/info/devices/devices.resolver.js';
import { DevicesService } from '@app/unraid-api/graph/resolvers/info/devices/devices.service.js';
import { DisplayService } from '@app/unraid-api/graph/resolvers/info/display/display.service.js';
import { InfoResolver } from '@app/unraid-api/graph/resolvers/info/info.resolver.js';
import { MemoryService } from '@app/unraid-api/graph/resolvers/info/memory/memory.service.js';
import { OsService } from '@app/unraid-api/graph/resolvers/info/os/os.service.js';
import { CoreVersionsResolver } from '@app/unraid-api/graph/resolvers/info/versions/core-versions.resolver.js';
import { VersionsResolver } from '@app/unraid-api/graph/resolvers/info/versions/versions.resolver.js';
import { VersionsService } from '@app/unraid-api/graph/resolvers/info/versions/versions.service.js';
import { ServicesModule } from '@app/unraid-api/graph/services/services.module.js';

@Module({
    imports: [ConfigModule, ServicesModule],
    providers: [
        // Main resolver
        InfoResolver,

        // Sub-resolvers
        DevicesResolver,
        VersionsResolver,
        CoreVersionsResolver,

        // Services
        CpuService,
        MemoryService,
        DevicesService,
        OsService,
        VersionsService,
        DisplayService,
    ],
    exports: [InfoResolver, DevicesResolver, VersionsResolver, CoreVersionsResolver, DisplayService],
})
export class InfoModule {}
@@ -0,0 +1,190 @@
|
||||
import type { TestingModule } from '@nestjs/testing';
|
||||
import { CACHE_MANAGER } from '@nestjs/cache-manager';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
|
||||
import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service.js';
|
||||
import { DevicesResolver } from '@app/unraid-api/graph/resolvers/info/devices/devices.resolver.js';
|
||||
import { DevicesService } from '@app/unraid-api/graph/resolvers/info/devices/devices.service.js';
|
||||
import { DisplayService } from '@app/unraid-api/graph/resolvers/info/display/display.service.js';
|
||||
import { InfoResolver } from '@app/unraid-api/graph/resolvers/info/info.resolver.js';
|
||||
import { MemoryService } from '@app/unraid-api/graph/resolvers/info/memory/memory.service.js';
|
||||
import { OsService } from '@app/unraid-api/graph/resolvers/info/os/os.service.js';
|
||||
import { VersionsService } from '@app/unraid-api/graph/resolvers/info/versions/versions.service.js';
|
||||
import { SubscriptionHelperService } from '@app/unraid-api/graph/services/subscription-helper.service.js';
|
||||
import { SubscriptionTrackerService } from '@app/unraid-api/graph/services/subscription-tracker.service.js';
|
||||
|
||||
describe('InfoResolver Integration Tests', () => {
|
||||
let infoResolver: InfoResolver;
|
||||
let devicesResolver: DevicesResolver;
|
||||
let module: TestingModule;
|
||||
|
||||
beforeEach(async () => {
|
||||
module = await Test.createTestingModule({
|
||||
providers: [
|
||||
InfoResolver,
|
||||
DevicesResolver,
|
||||
CpuService,
|
||||
MemoryService,
|
||||
DevicesService,
|
||||
OsService,
|
||||
VersionsService,
|
||||
DisplayService,
|
||||
{
|
||||
provide: SubscriptionTrackerService,
|
||||
useValue: {
|
||||
trackActiveSubscriptions: vi.fn(),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: SubscriptionHelperService,
|
||||
useValue: {},
|
||||
},
|
||||
{
|
||||
provide: ConfigService,
|
||||
useValue: {
|
||||
get: (key: string) => {
|
||||
if (key === 'store.emhttp.var.version') {
|
||||
return '6.12.0';
|
||||
}
|
||||
return undefined;
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: DockerService,
|
||||
useValue: {
|
||||
getContainers: async () => [],
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: CACHE_MANAGER,
|
||||
useValue: {
|
||||
get: async () => null,
|
||||
set: async () => {},
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
infoResolver = module.get<InfoResolver>(InfoResolver);
|
||||
devicesResolver = module.get<DevicesResolver>(DevicesResolver);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
if (module) {
|
||||
await module.close();
|
||||
}
|
||||
});
|
||||
|
||||
describe('InfoResolver ResolveFields', () => {
|
||||
it('should return basic info object', async () => {
|
||||
const result = await infoResolver.info();
|
||||
expect(result).toEqual({
|
||||
id: 'info',
|
||||
});
|
||||
});
|
||||
|
||||
it('should return current time', async () => {
|
||||
const before = new Date();
|
||||
const result = await infoResolver.time();
|
||||
const after = new Date();
|
||||
|
||||
expect(result).toBeInstanceOf(Date);
|
||||
expect(result.getTime()).toBeGreaterThanOrEqual(before.getTime());
|
||||
expect(result.getTime()).toBeLessThanOrEqual(after.getTime());
|
||||
});
|
||||
|
||||
it('should return full cpu object from service', async () => {
|
||||
const result = await infoResolver.cpu();
|
||||
|
||||
expect(result).toHaveProperty('id', 'info/cpu');
|
||||
expect(result).toHaveProperty('manufacturer');
|
||||
expect(result).toHaveProperty('brand');
|
||||
});
|
||||
|
||||
it('should return full memory object from service', async () => {
|
||||
const result = await infoResolver.memory();
|
||||
|
||||
expect(result).toHaveProperty('id', 'info/memory');
|
||||
expect(result).toHaveProperty('layout');
|
||||
expect(result.layout).toBeInstanceOf(Array);
|
||||
});
|
||||
|
||||
it('should return minimal devices stub for sub-resolver', () => {
|
||||
const result = infoResolver.devices();
|
||||
|
||||
expect(result).toHaveProperty('id', 'info/devices');
|
||||
expect(Object.keys(result)).toEqual(['id']);
|
||||
});
|
||||
|
||||
it('should return full display object from service', async () => {
|
||||
const result = await infoResolver.display();
|
||||
|
||||
expect(result).toHaveProperty('id', 'info/display');
|
||||
expect(result).toHaveProperty('theme');
|
||||
expect(result).toHaveProperty('unit');
|
||||
});
|
||||
|
||||
it('should return baseboard data', async () => {
|
||||
const result = await infoResolver.baseboard();
|
||||
|
||||
expect(result).toHaveProperty('id', 'info/baseboard');
|
||||
expect(result).toHaveProperty('manufacturer');
|
||||
expect(result).toHaveProperty('model');
|
||||
expect(result).toHaveProperty('version');
|
||||
// These are the actual properties from systeminformation
|
||||
expect(typeof result.manufacturer).toBe('string');
|
||||
});
|
||||
|
||||
it('should return system data', async () => {
|
||||
const result = await infoResolver.system();
|
||||
|
||||
expect(result).toHaveProperty('id', 'info/system');
|
||||
expect(result).toHaveProperty('manufacturer');
|
||||
expect(result).toHaveProperty('model');
|
||||
expect(result).toHaveProperty('version');
|
||||
expect(result).toHaveProperty('serial');
|
||||
expect(result).toHaveProperty('uuid');
|
||||
// Verify types
|
||||
expect(typeof result.manufacturer).toBe('string');
|
||||
});
|
||||
|
||||
it('should return os data from service', async () => {
|
||||
const result = await infoResolver.os();
|
||||
|
||||
expect(result).toHaveProperty('id', 'info/os');
|
||||
expect(result).toHaveProperty('platform');
|
||||
expect(result).toHaveProperty('distro');
|
||||
expect(result).toHaveProperty('release');
|
||||
expect(result).toHaveProperty('kernel');
|
||||
// Verify platform is a string (could be linux, darwin, win32, etc)
|
||||
expect(typeof result.platform).toBe('string');
|
||||
});
|
||||
|
||||
it('should return versions stub for field resolvers', () => {
|
||||
const result = infoResolver.versions();
|
||||
|
||||
expect(result).toHaveProperty('id', 'info/versions');
|
||||
// Versions now returns a stub object, with actual data resolved via field resolvers
|
||||
expect(Object.keys(result)).toEqual(['id']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Sub-Resolver Integration', () => {
|
||||
it('should resolve device fields through DevicesResolver', async () => {
|
||||
const gpu = await devicesResolver.gpu();
|
||||
const network = await devicesResolver.network();
|
||||
const pci = await devicesResolver.pci();
|
||||
const usb = await devicesResolver.usb();
|
||||
|
||||
expect(gpu).toBeInstanceOf(Array);
|
||||
expect(network).toBeInstanceOf(Array);
|
||||
expect(pci).toBeInstanceOf(Array);
|
||||
expect(usb).toBeInstanceOf(Array);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,225 +1,115 @@
|
||||
import type { TestingModule } from '@nestjs/testing';
|
||||
import { CACHE_MANAGER } from '@nestjs/cache-manager';
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { DisplayService } from '@app/unraid-api/graph/resolvers/display/display.service.js';
|
||||
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
|
||||
import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service.js';
|
||||
import { DisplayService } from '@app/unraid-api/graph/resolvers/info/display/display.service.js';
|
||||
import { InfoResolver } from '@app/unraid-api/graph/resolvers/info/info.resolver.js';
|
||||
import { InfoService } from '@app/unraid-api/graph/resolvers/info/info.service.js';
|
||||
import { MemoryService } from '@app/unraid-api/graph/resolvers/info/memory/memory.service.js';
|
||||
import { OsService } from '@app/unraid-api/graph/resolvers/info/os/os.service.js';
|
||||
import { VersionsService } from '@app/unraid-api/graph/resolvers/info/versions/versions.service.js';
|
||||
|
||||
// Mock necessary modules
|
||||
vi.mock('fs/promises', () => ({
|
||||
readFile: vi.fn().mockResolvedValue(''),
|
||||
}));
|
||||
|
||||
vi.mock('@app/core/pubsub.js', () => ({
|
||||
pubsub: {
|
||||
publish: vi.fn().mockResolvedValue(undefined),
|
||||
},
|
||||
PUBSUB_CHANNEL: {
|
||||
INFO: 'info',
|
||||
},
|
||||
createSubscription: vi.fn().mockReturnValue('mock-subscription'),
|
||||
}));
|
||||
|
||||
vi.mock('dockerode', () => {
|
||||
return {
|
||||
default: vi.fn().mockImplementation(() => ({
|
||||
listContainers: vi.fn(),
|
||||
listNetworks: vi.fn(),
|
||||
})),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('@app/store/index.js', () => ({
|
||||
getters: {
|
||||
paths: () => ({
|
||||
'docker-autostart': '/path/to/docker-autostart',
|
||||
}),
|
||||
},
|
||||
vi.mock('@app/core/utils/misc/get-machine-id.js', () => ({
|
||||
getMachineId: vi.fn().mockResolvedValue('test-machine-id-123'),
|
||||
}));
|
||||
|
||||
vi.mock('systeminformation', () => ({
|
||||
baseboard: vi.fn().mockResolvedValue({
|
||||
manufacturer: 'ASUS',
|
||||
model: 'PRIME X570-P',
|
||||
version: 'Rev X.0x',
|
||||
serial: 'ABC123',
|
||||
assetTag: 'Default string',
|
||||
model: 'ROG STRIX',
|
||||
version: '1.0',
|
||||
}),
|
||||
system: vi.fn().mockResolvedValue({
|
||||
manufacturer: 'ASUS',
|
||||
model: 'System Product Name',
|
||||
version: 'System Version',
|
||||
serial: 'System Serial Number',
|
||||
uuid: '550e8400-e29b-41d4-a716-446655440000',
|
||||
sku: 'SKU',
|
||||
model: 'System Model',
|
||||
version: '1.0',
|
||||
serial: '123456',
|
||||
uuid: 'test-uuid',
|
||||
}),
|
||||
}));
|
||||
|
||||
vi.mock('@app/core/utils/misc/get-machine-id.js', () => ({
|
||||
getMachineId: vi.fn().mockResolvedValue('test-machine-id-123'),
|
||||
}));
|
||||
|
||||
// Mock Cache Manager
|
||||
const mockCacheManager = {
|
||||
get: vi.fn(),
|
||||
set: vi.fn(),
|
||||
del: vi.fn(),
|
||||
};
|
||||
|
||||
describe('InfoResolver', () => {
|
||||
let resolver: InfoResolver;
|
||||
|
||||
// Mock data for testing
|
||||
const mockAppsData = {
|
||||
id: 'info/apps',
|
||||
installed: 5,
|
||||
started: 3,
|
||||
};
|
||||
|
||||
const mockCpuData = {
|
||||
id: 'info/cpu',
|
||||
manufacturer: 'AMD',
|
||||
brand: 'AMD Ryzen 9 5900X',
|
||||
vendor: 'AMD',
|
||||
family: '19',
|
||||
model: '33',
|
||||
stepping: 0,
|
||||
revision: '',
|
||||
voltage: '1.4V',
|
||||
speed: 3.7,
|
||||
speedmin: 2.2,
|
||||
speedmax: 4.8,
|
||||
threads: 24,
|
||||
cores: 12,
|
||||
processors: 1,
|
||||
socket: 'AM4',
|
||||
cache: { l1d: 32768, l1i: 32768, l2: 524288, l3: 33554432 },
|
||||
flags: ['fpu', 'vme', 'de', 'pse'],
|
||||
};
|
||||
|
||||
const mockDevicesData = {
|
||||
id: 'info/devices',
|
||||
gpu: [],
|
||||
pci: [],
|
||||
usb: [],
|
||||
};
|
||||
|
||||
const mockDisplayData = {
|
||||
id: 'display',
|
||||
case: {
|
||||
url: '',
|
||||
icon: 'default',
|
||||
error: '',
|
||||
base64: '',
|
||||
},
|
||||
theme: 'black',
|
||||
unit: 'C',
|
||||
scale: true,
|
||||
tabs: false,
|
||||
resize: true,
|
||||
wwn: false,
|
||||
total: true,
|
||||
usage: false,
|
||||
text: true,
|
||||
warning: 40,
|
||||
critical: 50,
|
||||
hot: 60,
|
||||
max: 80,
|
||||
locale: 'en_US',
|
||||
};
|
||||
|
||||
const mockMemoryData = {
|
||||
id: 'info/memory',
|
||||
max: 68719476736,
|
||||
total: 67108864000,
|
||||
free: 33554432000,
|
||||
used: 33554432000,
|
||||
active: 16777216000,
|
||||
available: 50331648000,
|
||||
buffcache: 8388608000,
|
||||
swaptotal: 4294967296,
|
||||
swapused: 0,
|
||||
swapfree: 4294967296,
|
||||
layout: [],
|
||||
};
|
||||
|
||||
const mockOsData = {
|
||||
id: 'info/os',
|
||||
platform: 'linux',
|
||||
distro: 'Unraid',
|
||||
release: '6.12.0',
|
||||
codename: '',
|
||||
kernel: '6.1.0-unraid',
|
||||
arch: 'x64',
|
||||
hostname: 'Tower',
|
||||
codepage: 'UTF-8',
|
||||
logofile: 'unraid',
|
||||
serial: '',
|
||||
build: '',
|
||||
uptime: '2024-01-01T00:00:00.000Z',
|
||||
};
|
||||
|
||||
const mockVersionsData = {
|
||||
id: 'info/versions',
|
||||
unraid: '6.12.0',
|
||||
kernel: '6.1.0',
|
||||
node: '20.10.0',
|
||||
npm: '10.2.3',
|
||||
docker: '24.0.7',
|
||||
};
|
||||
|
||||
// Mock InfoService
|
||||
const mockInfoService = {
|
||||
generateApps: vi.fn().mockResolvedValue(mockAppsData),
|
||||
generateCpu: vi.fn().mockResolvedValue(mockCpuData),
|
||||
generateDevices: vi.fn().mockResolvedValue(mockDevicesData),
|
||||
generateMemory: vi.fn().mockResolvedValue(mockMemoryData),
|
||||
generateOs: vi.fn().mockResolvedValue(mockOsData),
|
||||
generateVersions: vi.fn().mockResolvedValue(mockVersionsData),
|
||||
};
|
||||
|
||||
// Mock DisplayService
|
||||
const mockDisplayService = {
|
||||
generateDisplay: vi.fn().mockResolvedValue(mockDisplayData),
|
||||
};
|
||||
let cpuService: CpuService;
|
||||
let memoryService: MemoryService;
|
||||
let module: TestingModule;
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
module = await Test.createTestingModule({
|
||||
providers: [
|
||||
InfoResolver,
|
||||
{
|
||||
provide: InfoService,
|
||||
useValue: mockInfoService,
|
||||
provide: CpuService,
|
||||
useValue: {
|
||||
generateCpu: vi.fn().mockResolvedValue({
|
||||
id: 'info/cpu',
|
||||
manufacturer: 'Intel',
|
||||
brand: 'Core i7',
|
||||
cores: 8,
|
||||
threads: 16,
|
||||
}),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: MemoryService,
|
||||
useValue: {
|
||||
generateMemory: vi.fn().mockResolvedValue({
|
||||
id: 'info/memory',
|
||||
layout: [
|
||||
{
|
||||
id: 'mem-1',
|
||||
size: 8589934592,
|
||||
bank: 'BANK 0',
|
||||
type: 'DDR4',
|
||||
},
|
||||
],
|
||||
}),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: DisplayService,
|
||||
useValue: mockDisplayService,
|
||||
useValue: {
|
||||
generateDisplay: vi.fn().mockResolvedValue({
|
||||
id: 'info/display',
|
||||
theme: 'dark',
|
||||
unit: 'metric',
|
||||
scale: true,
|
||||
}),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: DockerService,
|
||||
useValue: {},
|
||||
provide: OsService,
|
||||
useValue: {
|
||||
generateOs: vi.fn().mockResolvedValue({
|
||||
id: 'info/os',
|
||||
platform: 'linux',
|
||||
distro: 'Unraid',
|
||||
release: '6.12.0',
|
||||
}),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: CACHE_MANAGER,
|
||||
useValue: mockCacheManager,
|
||||
provide: VersionsService,
|
||||
useValue: {
|
||||
generateVersions: vi.fn().mockResolvedValue({
|
||||
id: 'info/versions',
|
||||
unraid: '6.12.0',
|
||||
}),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
resolver = module.get<InfoResolver>(InfoResolver);
|
||||
|
||||
// Reset mocks before each test
|
||||
vi.clearAllMocks();
|
||||
cpuService = module.get<CpuService>(CpuService);
|
||||
memoryService = module.get<MemoryService>(MemoryService);
|
||||
});
|
||||
|
||||
describe('info', () => {
|
||||
it('should return basic info object', async () => {
|
||||
const result = await resolver.info();
|
||||
|
||||
expect(result).toEqual({
|
||||
id: 'info',
|
||||
});
|
||||
@@ -228,155 +118,129 @@ describe('InfoResolver', () => {
|
||||
|
||||
describe('time', () => {
|
||||
it('should return current date', async () => {
|
||||
const beforeCall = new Date();
|
||||
const before = new Date();
|
||||
const result = await resolver.time();
|
||||
const afterCall = new Date();
|
||||
const after = new Date();
|
||||
|
||||
expect(result).toBeInstanceOf(Date);
|
||||
expect(result.getTime()).toBeGreaterThanOrEqual(beforeCall.getTime());
|
||||
expect(result.getTime()).toBeLessThanOrEqual(afterCall.getTime());
|
||||
});
|
||||
});
|
||||
|
||||
describe('apps', () => {
|
||||
it('should return apps info from service', async () => {
|
||||
const result = await resolver.apps();
|
||||
|
||||
expect(mockInfoService.generateApps).toHaveBeenCalledOnce();
|
||||
expect(result).toEqual(mockAppsData);
|
||||
expect(result.getTime()).toBeGreaterThanOrEqual(before.getTime());
|
||||
expect(result.getTime()).toBeLessThanOrEqual(after.getTime());
|
||||
});
|
||||
});
|
||||
|
||||
describe('baseboard', () => {
|
||||
it('should return baseboard info with id', async () => {
|
||||
it('should return baseboard data from systeminformation', async () => {
|
||||
const result = await resolver.baseboard();
|
||||
|
||||
expect(result).toEqual({
|
||||
id: 'baseboard',
|
||||
id: 'info/baseboard',
|
||||
manufacturer: 'ASUS',
|
||||
model: 'PRIME X570-P',
|
||||
version: 'Rev X.0x',
|
||||
serial: 'ABC123',
|
||||
assetTag: 'Default string',
|
||||
model: 'ROG STRIX',
|
||||
version: '1.0',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('cpu', () => {
|
||||
it('should return cpu info from service', async () => {
|
||||
it('should return full cpu data from service', async () => {
|
||||
const result = await resolver.cpu();
|
||||
|
||||
expect(mockInfoService.generateCpu).toHaveBeenCalledOnce();
|
||||
expect(result).toEqual(mockCpuData);
|
||||
expect(cpuService.generateCpu).toHaveBeenCalled();
|
||||
expect(result).toEqual({
|
||||
id: 'info/cpu',
|
||||
manufacturer: 'Intel',
|
||||
brand: 'Core i7',
|
||||
cores: 8,
|
||||
threads: 16,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('devices', () => {
|
||||
it('should return devices info from service', async () => {
|
||||
const result = await resolver.devices();
|
||||
|
||||
expect(mockInfoService.generateDevices).toHaveBeenCalledOnce();
|
||||
expect(result).toEqual(mockDevicesData);
|
||||
it('should return devices stub for sub-resolver', () => {
|
||||
const result = resolver.devices();
|
||||
expect(result).toEqual({
|
||||
id: 'info/devices',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('display', () => {
|
||||
it('should return display info from display service', async () => {
|
||||
it('should return display data from service', async () => {
|
||||
const displayService = module.get<DisplayService>(DisplayService);
|
||||
const result = await resolver.display();
|
||||
|
||||
expect(mockDisplayService.generateDisplay).toHaveBeenCalledOnce();
|
||||
expect(result).toEqual(mockDisplayData);
|
||||
expect(displayService.generateDisplay).toHaveBeenCalled();
|
||||
expect(result).toEqual({
|
||||
id: 'info/display',
|
||||
theme: 'dark',
|
||||
unit: 'metric',
|
||||
scale: true,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('machineId', () => {
|
||||
it('should return machine id', async () => {
|
||||
const result = await resolver.machineId();
|
||||
|
||||
expect(result).toBe('test-machine-id-123');
|
||||
});
|
||||
|
||||
it('should handle getMachineId errors gracefully', async () => {
|
||||
const { getMachineId } = await import('@app/core/utils/misc/get-machine-id.js');
|
||||
vi.mocked(getMachineId).mockRejectedValueOnce(new Error('Machine ID error'));
|
||||
|
||||
await expect(resolver.machineId()).rejects.toThrow('Machine ID error');
|
||||
const result = await resolver.machineId();
|
||||
expect(getMachineId).toHaveBeenCalled();
|
||||
expect(result).toBe('test-machine-id-123');
|
||||
});
|
||||
});
|
||||
|
||||
describe('memory', () => {
|
||||
it('should return memory info from service', async () => {
|
||||
it('should return full memory data from service', async () => {
|
||||
const result = await resolver.memory();
|
||||
|
||||
expect(mockInfoService.generateMemory).toHaveBeenCalledOnce();
|
||||
expect(result).toEqual(mockMemoryData);
|
||||
expect(memoryService.generateMemory).toHaveBeenCalled();
|
||||
expect(result).toEqual({
|
||||
id: 'info/memory',
|
||||
layout: [
|
||||
{
|
||||
id: 'mem-1',
|
||||
size: 8589934592,
|
||||
bank: 'BANK 0',
|
||||
type: 'DDR4',
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('os', () => {
|
||||
it('should return os info from service', async () => {
|
||||
it('should return os data from service', async () => {
|
||||
const osService = module.get<OsService>(OsService);
|
||||
const result = await resolver.os();
|
||||
|
||||
expect(mockInfoService.generateOs).toHaveBeenCalledOnce();
|
||||
expect(result).toEqual(mockOsData);
|
||||
expect(osService.generateOs).toHaveBeenCalled();
|
||||
expect(result).toEqual({
|
||||
id: 'info/os',
|
||||
platform: 'linux',
|
||||
distro: 'Unraid',
|
||||
release: '6.12.0',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('system', () => {
|
||||
it('should return system info with id', async () => {
|
||||
it('should return system data from systeminformation', async () => {
|
||||
const result = await resolver.system();
|
||||
|
||||
expect(result).toEqual({
|
||||
id: 'system',
|
||||
id: 'info/system',
|
||||
manufacturer: 'ASUS',
|
||||
model: 'System Product Name',
|
||||
version: 'System Version',
|
||||
serial: 'System Serial Number',
|
||||
uuid: '550e8400-e29b-41d4-a716-446655440000',
|
||||
sku: 'SKU',
|
||||
model: 'System Model',
|
||||
version: '1.0',
|
||||
serial: '123456',
|
||||
uuid: 'test-uuid',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('versions', () => {
|
||||
it('should return versions info from service', async () => {
|
||||
it('should return versions data from service', async () => {
|
||||
const versionsService = module.get<VersionsService>(VersionsService);
|
||||
const result = await resolver.versions();
|
||||
|
||||
expect(mockInfoService.generateVersions).toHaveBeenCalledOnce();
|
||||
expect(result).toEqual(mockVersionsData);
|
||||
});
|
||||
});
|
||||
|
||||
describe('infoSubscription', () => {
|
||||
it('should create and return subscription', async () => {
|
||||
const { createSubscription, PUBSUB_CHANNEL } = await import('@app/core/pubsub.js');
|
||||
|
||||
const result = await resolver.infoSubscription();
|
||||
|
||||
expect(createSubscription).toHaveBeenCalledWith(PUBSUB_CHANNEL.INFO);
|
||||
expect(result).toBe('mock-subscription');
|
||||
});
|
||||
});
|
||||
|
||||
describe('error handling', () => {
|
||||
it('should handle baseboard errors gracefully', async () => {
|
||||
const { baseboard } = await import('systeminformation');
|
||||
vi.mocked(baseboard).mockRejectedValueOnce(new Error('Baseboard error'));
|
||||
|
||||
await expect(resolver.baseboard()).rejects.toThrow('Baseboard error');
|
||||
});
|
||||
|
||||
it('should handle system errors gracefully', async () => {
|
||||
const { system } = await import('systeminformation');
|
||||
vi.mocked(system).mockRejectedValueOnce(new Error('System error'));
|
||||
|
||||
await expect(resolver.system()).rejects.toThrow('System error');
|
||||
});
|
||||
|
||||
it('should handle service errors gracefully', async () => {
|
||||
mockInfoService.generateApps.mockRejectedValueOnce(new Error('Service error'));
|
||||
|
||||
await expect(resolver.apps()).rejects.toThrow('Service error');
|
||||
expect(versionsService.generateVersions).toHaveBeenCalled();
|
||||
expect(result).toEqual({
|
||||
id: 'info/versions',
|
||||
unraid: '6.12.0',
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||