mirror of
https://github.com/unraid/api.git
synced 2026-01-02 14:40:01 -06:00
Compare commits
72 Commits
4.22.2-bui
...
v4.27.2
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
375dcd0598 | ||
|
|
64875edbba | ||
|
|
330e81a484 | ||
|
|
b8f0fdf8d2 | ||
|
|
36c104915e | ||
|
|
dc9a036c73 | ||
|
|
c71b0487ad | ||
|
|
e7340431a5 | ||
|
|
e4a9b8291b | ||
|
|
6b6b78fa2e | ||
|
|
e2fdf6cadb | ||
|
|
3d4f193fa4 | ||
|
|
b28ef1ea33 | ||
|
|
ee0f240233 | ||
|
|
3aacaa1fb5 | ||
|
|
0cd4c0ae16 | ||
|
|
66625ded6a | ||
|
|
f8a6785e9c | ||
|
|
d7aca81c60 | ||
|
|
854b403fbd | ||
|
|
c264a1843c | ||
|
|
45cda4af80 | ||
|
|
64eb9ce9b5 | ||
|
|
d56797c59f | ||
|
|
92af3b6115 | ||
|
|
35f8bc2258 | ||
|
|
c4cd0c6352 | ||
|
|
818e7ce997 | ||
|
|
7e13202aa1 | ||
|
|
d18eaf2364 | ||
|
|
42406e795d | ||
|
|
11d2de5d08 | ||
|
|
031c1ab5dc | ||
|
|
34075e44c5 | ||
|
|
ff2906e52a | ||
|
|
a0d6cc92c8 | ||
|
|
57acfaacf0 | ||
|
|
ea816c7a5c | ||
|
|
cafde72d38 | ||
|
|
2b481c397c | ||
|
|
8c4e9dd7ae | ||
|
|
f212dce88b | ||
|
|
8cd2a4c124 | ||
|
|
10f048ee1f | ||
|
|
e9e271ade5 | ||
|
|
31c41027fc | ||
|
|
fabe6a2c4b | ||
|
|
754966d5d3 | ||
|
|
ed594e9147 | ||
|
|
50d83313a1 | ||
|
|
e57ec00627 | ||
|
|
84f4a7221d | ||
|
|
d73953f8ff | ||
|
|
0d165a6087 | ||
|
|
f4f3e3c44b | ||
|
|
cd5eff11bc | ||
|
|
7bdeca8338 | ||
|
|
661865f976 | ||
|
|
b7afaf4632 | ||
|
|
b3ca40c639 | ||
|
|
378cdb7f10 | ||
|
|
d9c561bfeb | ||
|
|
9972a5f178 | ||
|
|
a44473c1d1 | ||
|
|
ed9a5c5ff9 | ||
|
|
d8b166e4b6 | ||
|
|
8b862ecef5 | ||
|
|
16913627de | ||
|
|
6b2f331941 | ||
|
|
8f02d96464 | ||
|
|
caff5a78ba | ||
|
|
810be7a679 |
@@ -241,4 +241,3 @@ const pinia = createTestingPinia({
|
||||
- Set initial state for focused testing
|
||||
- Test computed properties by accessing them directly
|
||||
- Verify state changes by updating the store
|
||||
|
||||
|
||||
201
.github/workflows/build-artifacts.yml
vendored
Normal file
201
.github/workflows/build-artifacts.yml
vendored
Normal file
@@ -0,0 +1,201 @@
|
||||
name: Build Artifacts
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ref:
|
||||
type: string
|
||||
required: false
|
||||
description: "Git ref to checkout (commit SHA, branch, or tag)"
|
||||
version_override:
|
||||
type: string
|
||||
required: false
|
||||
description: "Override version (for manual releases)"
|
||||
outputs:
|
||||
build_number:
|
||||
description: "Build number for the artifacts"
|
||||
value: ${{ jobs.build-api.outputs.build_number }}
|
||||
secrets:
|
||||
VITE_ACCOUNT:
|
||||
required: true
|
||||
VITE_CONNECT:
|
||||
required: true
|
||||
VITE_UNRAID_NET:
|
||||
required: true
|
||||
VITE_CALLBACK_KEY:
|
||||
required: true
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN:
|
||||
required: false
|
||||
|
||||
jobs:
|
||||
build-api:
|
||||
name: Build API
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
build_number: ${{ steps.buildnumber.outputs.build_number }}
|
||||
defaults:
|
||||
run:
|
||||
working-directory: api
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.ref || github.ref }}
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential
|
||||
version: 1.0
|
||||
|
||||
- name: PNPM Install
|
||||
run: |
|
||||
cd ${{ github.workspace }}
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Get Git Short Sha and API version
|
||||
id: vars
|
||||
run: |
|
||||
GIT_SHA=$(git rev-parse --short HEAD)
|
||||
IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
|
||||
PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
|
||||
API_VERSION=${{ inputs.version_override && format('"{0}"', inputs.version_override) || '${PACKAGE_LOCK_VERSION}' }}
|
||||
if [ -z "${{ inputs.version_override }}" ] && [ -z "$IS_TAGGED" ]; then
|
||||
API_VERSION="${PACKAGE_LOCK_VERSION}+${GIT_SHA}"
|
||||
fi
|
||||
export API_VERSION
|
||||
echo "API_VERSION=${API_VERSION}" >> $GITHUB_ENV
|
||||
echo "PACKAGE_LOCK_VERSION=${PACKAGE_LOCK_VERSION}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Generate build number
|
||||
id: buildnumber
|
||||
uses: onyxmueller/build-tag-number@v1
|
||||
with:
|
||||
token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN || github.token }}
|
||||
prefix: ${{ inputs.version_override || steps.vars.outputs.PACKAGE_LOCK_VERSION }}
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
pnpm run build:release
|
||||
tar -czf deploy/unraid-api.tgz -C deploy/pack/ .
|
||||
|
||||
- name: Upload tgz to Github artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: unraid-api
|
||||
path: ${{ github.workspace }}/api/deploy/unraid-api.tgz
|
||||
|
||||
build-unraid-ui-webcomponents:
|
||||
name: Build Unraid UI Library (Webcomponent Version)
|
||||
defaults:
|
||||
run:
|
||||
working-directory: unraid-ui
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.ref || github.ref }}
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential
|
||||
version: 1.0
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
cd ${{ github.workspace }}
|
||||
pnpm install --frozen-lockfile --filter @unraid/ui
|
||||
|
||||
- name: Lint
|
||||
run: pnpm run lint
|
||||
|
||||
- name: Build
|
||||
run: pnpm run build:wc
|
||||
|
||||
- name: Upload Artifact to Github
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: unraid-wc-ui
|
||||
path: unraid-ui/dist-wc/
|
||||
|
||||
build-web:
|
||||
name: Build Web App
|
||||
defaults:
|
||||
run:
|
||||
working-directory: web
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.ref || github.ref }}
|
||||
|
||||
- name: Create env file
|
||||
run: |
|
||||
touch .env
|
||||
echo VITE_ACCOUNT=${{ secrets.VITE_ACCOUNT }} >> .env
|
||||
echo VITE_CONNECT=${{ secrets.VITE_CONNECT }} >> .env
|
||||
echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
|
||||
echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: PNPM Install
|
||||
run: |
|
||||
cd ${{ github.workspace }}
|
||||
pnpm install --frozen-lockfile --filter @unraid/web --filter @unraid/ui
|
||||
|
||||
- name: Build Unraid UI
|
||||
run: |
|
||||
cd ${{ github.workspace }}/unraid-ui
|
||||
pnpm run build
|
||||
|
||||
- name: Lint files
|
||||
run: pnpm run lint
|
||||
|
||||
- name: Type Check
|
||||
run: pnpm run type-check
|
||||
|
||||
- name: Build
|
||||
run: pnpm run build
|
||||
|
||||
- name: Upload build to Github artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: unraid-wc-rich
|
||||
path: web/dist
|
||||
|
||||
12
.github/workflows/build-plugin.yml
vendored
12
.github/workflows/build-plugin.yml
vendored
@@ -27,6 +27,15 @@ on:
|
||||
type: string
|
||||
required: true
|
||||
description: "Build number for the plugin builds"
|
||||
ref:
|
||||
type: string
|
||||
required: false
|
||||
description: "Git ref (commit SHA, branch, or tag) to checkout"
|
||||
TRIGGER_PRODUCTION_RELEASE:
|
||||
type: boolean
|
||||
required: false
|
||||
default: false
|
||||
description: "Whether to automatically trigger the release-production workflow (default: false)"
|
||||
secrets:
|
||||
CF_ACCESS_KEY_ID:
|
||||
required: true
|
||||
@@ -49,6 +58,7 @@ jobs:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.ref }}
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
@@ -136,7 +146,7 @@ jobs:
|
||||
done
|
||||
|
||||
- name: Workflow Dispatch and wait
|
||||
if: inputs.RELEASE_CREATED == 'true'
|
||||
if: inputs.RELEASE_CREATED == 'true' && inputs.TRIGGER_PRODUCTION_RELEASE == true
|
||||
uses: the-actions-org/workflow-dispatch@v4.0.0
|
||||
with:
|
||||
workflow: release-production.yml
|
||||
|
||||
103
.github/workflows/claude-code-review.yml
vendored
103
.github/workflows/claude-code-review.yml
vendored
@@ -1,103 +0,0 @@
|
||||
name: Claude Code Review
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
# Skip reviews for non-code changes
|
||||
paths-ignore:
|
||||
- "**/*.md"
|
||||
- "**/package-lock.json"
|
||||
- "**/pnpm-lock.yaml"
|
||||
- "**/.gitignore"
|
||||
- "**/LICENSE"
|
||||
- "**/*.config.js"
|
||||
- "**/*.config.ts"
|
||||
- "**/tsconfig.json"
|
||||
- "**/.github/workflows/*.yml"
|
||||
- "**/docs/**"
|
||||
|
||||
jobs:
|
||||
claude-review:
|
||||
# Skip review for bot PRs and WIP/skip-review PRs
|
||||
# Only run if changes are significant (>10 lines)
|
||||
if: |
|
||||
(github.event.pull_request.additions > 10 || github.event.pull_request.deletions > 10) &&
|
||||
!contains(github.event.pull_request.title, '[skip-review]') &&
|
||||
!contains(github.event.pull_request.title, '[WIP]') &&
|
||||
!endsWith(github.event.pull_request.user.login, '[bot]') &&
|
||||
github.event.pull_request.user.login != 'dependabot' &&
|
||||
github.event.pull_request.user.login != 'renovate'
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
issues: read
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Run Claude Code Review
|
||||
id: claude-review
|
||||
uses: anthropics/claude-code-action@beta
|
||||
with:
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
|
||||
# Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4)
|
||||
# model: "claude-opus-4-20250514"
|
||||
|
||||
# Direct prompt for automated review (no @claude mention needed)
|
||||
direct_prompt: |
|
||||
IMPORTANT: Review ONLY the DIFF/CHANGESET - the actual lines that were added or modified in this PR.
|
||||
DO NOT review the entire file context, only analyze the specific changes being made.
|
||||
|
||||
Look for HIGH-PRIORITY issues in the CHANGED LINES ONLY:
|
||||
|
||||
1. CRITICAL BUGS: Logic errors, null pointer issues, infinite loops, race conditions
|
||||
2. SECURITY: SQL injection, XSS, authentication bypass, exposed secrets, unsafe operations
|
||||
3. BREAKING CHANGES: API contract violations, removed exports, changed function signatures
|
||||
4. DATA LOSS RISKS: Destructive operations without safeguards, missing data validation
|
||||
|
||||
DO NOT comment on:
|
||||
- Code that wasn't changed in this PR
|
||||
- Style, formatting, or documentation
|
||||
- Test coverage (unless tests are broken by the changes)
|
||||
- Minor optimizations or best practices
|
||||
- Existing code issues that weren't introduced by this PR
|
||||
|
||||
If you find no critical issues in the DIFF, respond with: "✅ No critical issues found in changes"
|
||||
|
||||
Keep response under 10 lines. Reference specific line numbers from the diff when reporting issues.
|
||||
|
||||
# Optional: Use sticky comments to make Claude reuse the same comment on subsequent pushes to the same PR
|
||||
use_sticky_comment: true
|
||||
|
||||
# Context-aware review based on PR characteristics
|
||||
# Uncomment to enable different review strategies based on context
|
||||
# direct_prompt: |
|
||||
# ${{
|
||||
# (github.event.pull_request.additions > 500) &&
|
||||
# 'Large PR detected. Focus only on architectural issues and breaking changes. Skip minor issues.' ||
|
||||
# contains(github.event.pull_request.title, 'fix') &&
|
||||
# 'Bug fix PR: Verify the fix addresses the root cause and check for regression risks.' ||
|
||||
# contains(github.event.pull_request.title, 'deps') &&
|
||||
# 'Dependency update: Check for breaking changes and security advisories only.' ||
|
||||
# contains(github.event.pull_request.title, 'refactor') &&
|
||||
# 'Refactor PR: Verify no behavior changes and check for performance regressions.' ||
|
||||
# contains(github.event.pull_request.title, 'feat') &&
|
||||
# 'New feature: Check for security issues, edge cases, and integration problems only.' ||
|
||||
# 'Standard review: Check for critical bugs, security issues, and breaking changes only.'
|
||||
# }}
|
||||
|
||||
# Optional: Add specific tools for running tests or linting
|
||||
# allowed_tools: "Bash(npm run test),Bash(npm run lint),Bash(npm run typecheck)"
|
||||
|
||||
# Optional: Skip review for certain conditions
|
||||
# if: |
|
||||
# !contains(github.event.pull_request.title, '[skip-review]') &&
|
||||
# !contains(github.event.pull_request.title, '[WIP]')
|
||||
|
||||
64
.github/workflows/claude.yml
vendored
64
.github/workflows/claude.yml
vendored
@@ -1,64 +0,0 @@
|
||||
name: Claude Code
|
||||
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
pull_request_review_comment:
|
||||
types: [created]
|
||||
issues:
|
||||
types: [opened, assigned]
|
||||
pull_request_review:
|
||||
types: [submitted]
|
||||
|
||||
jobs:
|
||||
claude:
|
||||
if: |
|
||||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
|
||||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
issues: read
|
||||
id-token: write
|
||||
actions: read # Required for Claude to read CI results on PRs
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Run Claude Code
|
||||
id: claude
|
||||
uses: anthropics/claude-code-action@beta
|
||||
with:
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
|
||||
# This is an optional setting that allows Claude to read CI results on PRs
|
||||
additional_permissions: |
|
||||
actions: read
|
||||
|
||||
# Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4)
|
||||
# model: "claude-opus-4-20250514"
|
||||
|
||||
# Optional: Customize the trigger phrase (default: @claude)
|
||||
# trigger_phrase: "/claude"
|
||||
|
||||
# Optional: Trigger when specific user is assigned to an issue
|
||||
# assignee_trigger: "claude-bot"
|
||||
|
||||
# Optional: Allow Claude to run specific commands
|
||||
# allowed_tools: "Bash(npm install),Bash(npm run build),Bash(npm run test:*),Bash(npm run lint:*)"
|
||||
|
||||
# Optional: Add custom instructions for Claude to customize its behavior for your project
|
||||
# custom_instructions: |
|
||||
# Follow our coding standards
|
||||
# Ensure all new code has tests
|
||||
# Use TypeScript for new files
|
||||
|
||||
# Optional: Custom environment variables for Claude
|
||||
# claude_env: |
|
||||
# NODE_ENV: test
|
||||
|
||||
82
.github/workflows/create-docusaurus-pr.yml
vendored
82
.github/workflows/create-docusaurus-pr.yml
vendored
@@ -1,82 +0,0 @@
|
||||
name: Update API Documentation
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- 'api/docs/**'
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
# Add permissions for GITHUB_TOKEN
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
jobs:
|
||||
create-docs-pr:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout source repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
path: source-repo
|
||||
|
||||
- name: Checkout docs repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
repository: unraid/docs
|
||||
path: docs-repo
|
||||
token: ${{ secrets.DOCS_PAT_UNRAID_BOT }}
|
||||
|
||||
- name: Copy and process docs
|
||||
run: |
|
||||
if [ ! -d "source-repo/api/docs" ]; then
|
||||
echo "Source directory does not exist!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Remove old API docs but preserve other folders
|
||||
rm -rf docs-repo/docs/API/
|
||||
mkdir -p docs-repo/docs/API
|
||||
|
||||
# Copy all markdown files and maintain directory structure
|
||||
cp -r source-repo/api/docs/public/. docs-repo/docs/API/
|
||||
|
||||
# Copy images to Docusaurus static directory
|
||||
mkdir -p docs-repo/static/img/api
|
||||
|
||||
# Copy images from public/images if they exist
|
||||
if [ -d "source-repo/api/docs/public/images" ]; then
|
||||
cp -r source-repo/api/docs/public/images/. docs-repo/static/img/api/
|
||||
fi
|
||||
|
||||
# Also copy any images from the parent docs/images directory
|
||||
if [ -d "source-repo/api/docs/images" ]; then
|
||||
cp -r source-repo/api/docs/images/. docs-repo/static/img/api/
|
||||
fi
|
||||
|
||||
# Update image paths in markdown files
|
||||
# Replace relative image paths with absolute paths pointing to /img/api/
|
||||
find docs-repo/docs/API -name "*.md" -type f -exec sed -i 's|!\[\([^]]*\)\](\./images/\([^)]*\))||g' {} \;
|
||||
find docs-repo/docs/API -name "*.md" -type f -exec sed -i 's|!\[\([^]]*\)\](images/\([^)]*\))||g' {} \;
|
||||
find docs-repo/docs/API -name "*.md" -type f -exec sed -i 's|!\[\([^]]*\)\](../images/\([^)]*\))||g' {} \;
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.DOCS_PAT_UNRAID_BOT }}
|
||||
path: docs-repo
|
||||
commit-message: 'docs: update API documentation'
|
||||
title: 'Update API Documentation'
|
||||
body: |
|
||||
This PR updates the API documentation based on changes from the main repository.
|
||||
|
||||
Changes were automatically generated from api/docs/* directory.
|
||||
|
||||
@coderabbitai ignore
|
||||
reviewers: ljm42, elibosley
|
||||
branch: update-api-docs
|
||||
base: main
|
||||
delete-branch: true
|
||||
210
.github/workflows/generate-release-notes.yml
vendored
Normal file
210
.github/workflows/generate-release-notes.yml
vendored
Normal file
@@ -0,0 +1,210 @@
|
||||
name: Generate Release Notes
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
version:
|
||||
description: 'Version number (e.g., 4.25.3)'
|
||||
required: true
|
||||
type: string
|
||||
target_commitish:
|
||||
description: 'Commit SHA or branch (leave empty for current HEAD)'
|
||||
required: false
|
||||
type: string
|
||||
release_notes:
|
||||
description: 'Custom release notes (leave empty to auto-generate)'
|
||||
required: false
|
||||
type: string
|
||||
outputs:
|
||||
release_notes:
|
||||
description: 'Generated or provided release notes'
|
||||
value: ${{ jobs.generate.outputs.release_notes }}
|
||||
secrets:
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
generate:
|
||||
name: Generate Release Notes
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
release_notes: ${{ steps.generate_notes.outputs.release_notes }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.target_commitish || github.ref }}
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Generate Release Notes
|
||||
id: generate_notes
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
TAG_NAME="v${{ inputs.version }}"
|
||||
VERSION="${{ inputs.version }}"
|
||||
|
||||
if [ -n "${{ inputs.release_notes }}" ]; then
|
||||
NOTES="${{ inputs.release_notes }}"
|
||||
else
|
||||
CHANGELOG_PATH="api/CHANGELOG.md"
|
||||
|
||||
if [ -f "$CHANGELOG_PATH" ]; then
|
||||
echo "Extracting release notes from CHANGELOG.md for version ${VERSION}"
|
||||
|
||||
NOTES=$(awk -v ver="$VERSION" '
|
||||
BEGIN {
|
||||
found=0; capture=0; output="";
|
||||
gsub(/\./, "\\.", ver);
|
||||
}
|
||||
/^## \[/ {
|
||||
if (capture) exit;
|
||||
if ($0 ~ "\\[" ver "\\]") {
|
||||
found=1;
|
||||
capture=1;
|
||||
}
|
||||
}
|
||||
capture {
|
||||
if (output != "") output = output "\n";
|
||||
output = output $0;
|
||||
}
|
||||
END {
|
||||
if (found) print output;
|
||||
else exit 1;
|
||||
}
|
||||
' "$CHANGELOG_PATH") || EXTRACTION_STATUS=$?
|
||||
|
||||
if [ ${EXTRACTION_STATUS:-0} -eq 0 ] && [ -n "$NOTES" ]; then
|
||||
echo "✓ Found release notes in CHANGELOG.md"
|
||||
else
|
||||
echo "⚠ Version ${VERSION} not found in CHANGELOG.md, generating with conventional-changelog"
|
||||
|
||||
PREV_TAG=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")
|
||||
CHANGELOG_GENERATED=false
|
||||
|
||||
if [ -n "$PREV_TAG" ]; then
|
||||
echo "Generating changelog from ${PREV_TAG}..HEAD using conventional-changelog"
|
||||
|
||||
npm install -g conventional-changelog-cli
|
||||
|
||||
TEMP_NOTES=$(mktemp)
|
||||
conventional-changelog -p conventionalcommits \
|
||||
--release-count 1 \
|
||||
--output-unreleased \
|
||||
> "$TEMP_NOTES" 2>/dev/null || true
|
||||
|
||||
if [ -s "$TEMP_NOTES" ]; then
|
||||
NOTES=$(cat "$TEMP_NOTES")
|
||||
|
||||
if [ -n "$NOTES" ]; then
|
||||
echo "✓ Generated changelog with conventional-changelog"
|
||||
CHANGELOG_GENERATED=true
|
||||
|
||||
TEMP_CHANGELOG=$(mktemp)
|
||||
{
|
||||
if [ -f "$CHANGELOG_PATH" ]; then
|
||||
head -n 1 "$CHANGELOG_PATH"
|
||||
echo ""
|
||||
echo "$NOTES"
|
||||
echo ""
|
||||
tail -n +2 "$CHANGELOG_PATH"
|
||||
else
|
||||
echo "# Changelog"
|
||||
echo ""
|
||||
echo "$NOTES"
|
||||
fi
|
||||
} > "$TEMP_CHANGELOG"
|
||||
|
||||
mv "$TEMP_CHANGELOG" "$CHANGELOG_PATH"
|
||||
echo "✓ Updated CHANGELOG.md with generated notes"
|
||||
else
|
||||
echo "⚠ conventional-changelog produced empty output, using GitHub auto-generation"
|
||||
NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
|
||||
-f tag_name="${TAG_NAME}" \
|
||||
-f target_commitish="${{ inputs.target_commitish || github.sha }}" \
|
||||
-f previous_tag_name="${PREV_TAG}" \
|
||||
--jq '.body')
|
||||
fi
|
||||
else
|
||||
echo "⚠ conventional-changelog failed, using GitHub auto-generation"
|
||||
NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
|
||||
-f tag_name="${TAG_NAME}" \
|
||||
-f target_commitish="${{ inputs.target_commitish || github.sha }}" \
|
||||
-f previous_tag_name="${PREV_TAG}" \
|
||||
--jq '.body')
|
||||
fi
|
||||
|
||||
rm -f "$TEMP_NOTES"
|
||||
else
|
||||
echo "⚠ No previous tag found, using GitHub auto-generation"
|
||||
NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
|
||||
-f tag_name="${TAG_NAME}" \
|
||||
-f target_commitish="${{ inputs.target_commitish || github.sha }}" \
|
||||
--jq '.body' || echo "Release ${VERSION}")
|
||||
fi
|
||||
|
||||
if [ "$CHANGELOG_GENERATED" = true ]; then
|
||||
BRANCH_OR_SHA="${{ inputs.target_commitish || github.ref }}"
|
||||
|
||||
if git show-ref --verify --quiet "refs/heads/${BRANCH_OR_SHA}"; then
|
||||
echo ""
|
||||
echo "=========================================="
|
||||
echo "CHANGELOG GENERATED AND COMMITTED"
|
||||
echo "=========================================="
|
||||
echo ""
|
||||
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
BEFORE_SHA=$(git rev-parse HEAD)
|
||||
|
||||
git add "$CHANGELOG_PATH"
|
||||
git commit -m "chore: add changelog for version ${VERSION}"
|
||||
git push origin "HEAD:${BRANCH_OR_SHA}"
|
||||
|
||||
AFTER_SHA=$(git rev-parse HEAD)
|
||||
|
||||
echo "✓ Changelog committed and pushed successfully"
|
||||
echo ""
|
||||
echo "Previous SHA: ${BEFORE_SHA}"
|
||||
echo "New SHA: ${AFTER_SHA}"
|
||||
echo ""
|
||||
echo "⚠️ CRITICAL: A new commit was created, but github.sha is immutable."
|
||||
echo "⚠️ github.sha = ${BEFORE_SHA} (original workflow trigger)"
|
||||
echo "⚠️ The release tag must point to ${AFTER_SHA} (with changelog)"
|
||||
echo ""
|
||||
echo "Re-run this workflow to create the release with the correct commit."
|
||||
echo ""
|
||||
exit 1
|
||||
else
|
||||
echo "⚠ Target is a commit SHA, not a branch. Cannot push changelog updates."
|
||||
echo "Changelog was generated but not committed."
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo "⚠ CHANGELOG.md not found, using GitHub auto-generation"
|
||||
PREV_TAG=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")
|
||||
|
||||
if [ -n "$PREV_TAG" ]; then
|
||||
NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
|
||||
-f tag_name="${TAG_NAME}" \
|
||||
-f target_commitish="${{ inputs.target_commitish || github.sha }}" \
|
||||
-f previous_tag_name="${PREV_TAG}" \
|
||||
--jq '.body')
|
||||
else
|
||||
NOTES="Release ${VERSION}"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "release_notes<<EOF" >> $GITHUB_OUTPUT
|
||||
echo "$NOTES" >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
|
||||
197
.github/workflows/main.yml
vendored
197
.github/workflows/main.yml
vendored
@@ -154,173 +154,15 @@ jobs:
|
||||
files: ./coverage/coverage-final.json,../web/coverage/coverage-final.json,../unraid-ui/coverage/coverage-final.json,../packages/unraid-api-plugin-connect/coverage/coverage-final.json,../packages/unraid-shared/coverage/coverage-final.json
|
||||
fail_ci_if_error: false
|
||||
|
||||
build-api:
|
||||
name: Build API
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
build_number: ${{ steps.buildnumber.outputs.build_number }}
|
||||
defaults:
|
||||
run:
|
||||
working-directory: api
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential
|
||||
version: 1.0
|
||||
|
||||
- name: PNPM Install
|
||||
run: |
|
||||
cd ${{ github.workspace }}
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build
|
||||
run: pnpm run build
|
||||
|
||||
- name: Get Git Short Sha and API version
|
||||
id: vars
|
||||
run: |
|
||||
GIT_SHA=$(git rev-parse --short HEAD)
|
||||
IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
|
||||
PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
|
||||
API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
|
||||
export API_VERSION
|
||||
echo "API_VERSION=${API_VERSION}" >> $GITHUB_ENV
|
||||
echo "PACKAGE_LOCK_VERSION=${PACKAGE_LOCK_VERSION}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Generate build number
|
||||
id: buildnumber
|
||||
uses: onyxmueller/build-tag-number@v1
|
||||
with:
|
||||
token: ${{secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN}}
|
||||
prefix: ${{steps.vars.outputs.PACKAGE_LOCK_VERSION}}
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
pnpm run build:release
|
||||
tar -czf deploy/unraid-api.tgz -C deploy/pack/ .
|
||||
|
||||
- name: Upload tgz to Github artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: unraid-api
|
||||
path: ${{ github.workspace }}/api/deploy/unraid-api.tgz
|
||||
|
||||
build-unraid-ui-webcomponents:
|
||||
name: Build Unraid UI Library (Webcomponent Version)
|
||||
defaults:
|
||||
run:
|
||||
working-directory: unraid-ui
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential
|
||||
version: 1.0
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
cd ${{ github.workspace }}
|
||||
pnpm install --frozen-lockfile --filter @unraid/ui
|
||||
|
||||
- name: Lint
|
||||
run: pnpm run lint
|
||||
|
||||
- name: Build
|
||||
run: pnpm run build:wc
|
||||
|
||||
- name: Upload Artifact to Github
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: unraid-wc-ui
|
||||
path: unraid-ui/dist-wc/
|
||||
|
||||
build-web:
|
||||
# needs: [build-unraid-ui]
|
||||
name: Build Web App
|
||||
defaults:
|
||||
run:
|
||||
working-directory: web
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Create env file
|
||||
run: |
|
||||
touch .env
|
||||
echo VITE_ACCOUNT=${{ secrets.VITE_ACCOUNT }} >> .env
|
||||
echo VITE_CONNECT=${{ secrets.VITE_CONNECT }} >> .env
|
||||
echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
|
||||
echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: PNPM Install
|
||||
run: |
|
||||
cd ${{ github.workspace }}
|
||||
pnpm install --frozen-lockfile --filter @unraid/web --filter @unraid/ui
|
||||
|
||||
- name: Build Unraid UI
|
||||
run: |
|
||||
cd ${{ github.workspace }}/unraid-ui
|
||||
pnpm run build
|
||||
|
||||
- name: Lint files
|
||||
run: pnpm run lint
|
||||
|
||||
- name: Type Check
|
||||
run: pnpm run type-check
|
||||
|
||||
- name: Test
|
||||
run: pnpm run test:ci
|
||||
|
||||
- name: Build
|
||||
run: pnpm run build
|
||||
|
||||
- name: Upload build to Github artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: unraid-wc-rich
|
||||
path: web/dist
|
||||
build-artifacts:
|
||||
name: Build All Artifacts
|
||||
uses: ./.github/workflows/build-artifacts.yml
|
||||
secrets:
|
||||
VITE_ACCOUNT: ${{ secrets.VITE_ACCOUNT }}
|
||||
VITE_CONNECT: ${{ secrets.VITE_CONNECT }}
|
||||
VITE_UNRAID_NET: ${{ secrets.VITE_UNRAID_NET }}
|
||||
VITE_CALLBACK_KEY: ${{ secrets.VITE_CALLBACK_KEY }}
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
release-please:
|
||||
name: Release Please
|
||||
@@ -329,15 +171,15 @@ jobs:
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||
needs:
|
||||
- test-api
|
||||
- build-api
|
||||
- build-web
|
||||
- build-unraid-ui-webcomponents
|
||||
- build-artifacts
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- id: release
|
||||
uses: googleapis/release-please-action@v4
|
||||
@@ -348,17 +190,15 @@ jobs:
|
||||
build-plugin-staging-pr:
|
||||
name: Build and Deploy Plugin
|
||||
needs:
|
||||
- build-api
|
||||
- build-web
|
||||
- build-unraid-ui-webcomponents
|
||||
- build-artifacts
|
||||
- test-api
|
||||
uses: ./.github/workflows/build-plugin.yml
|
||||
with:
|
||||
RELEASE_CREATED: false
|
||||
RELEASE_CREATED: 'false'
|
||||
TAG: ${{ github.event.pull_request.number && format('PR{0}', github.event.pull_request.number) || '' }}
|
||||
BUCKET_PATH: ${{ github.event.pull_request.number && format('unraid-api/tag/PR{0}', github.event.pull_request.number) || 'unraid-api' }}
|
||||
BASE_URL: "https://preview.dl.unraid.net/unraid-api"
|
||||
BUILD_NUMBER: ${{ needs.build-api.outputs.build_number }}
|
||||
BUILD_NUMBER: ${{ needs.build-artifacts.outputs.build_number }}
|
||||
secrets:
|
||||
CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
|
||||
CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
@@ -370,15 +210,16 @@ jobs:
|
||||
name: Build and Deploy Production Plugin
|
||||
needs:
|
||||
- release-please
|
||||
- build-api
|
||||
- build-artifacts
|
||||
uses: ./.github/workflows/build-plugin.yml
|
||||
with:
|
||||
RELEASE_CREATED: true
|
||||
RELEASE_CREATED: 'true'
|
||||
RELEASE_TAG: ${{ needs.release-please.outputs.tag_name }}
|
||||
TAG: ""
|
||||
BUCKET_PATH: unraid-api
|
||||
BASE_URL: "https://stable.dl.unraid.net/unraid-api"
|
||||
BUILD_NUMBER: ${{ needs.build-api.outputs.build_number }}
|
||||
BUILD_NUMBER: ${{ needs.build-artifacts.outputs.build_number }}
|
||||
TRIGGER_PRODUCTION_RELEASE: true
|
||||
secrets:
|
||||
CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
|
||||
CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
|
||||
239
.github/workflows/manual-release.yml
vendored
Normal file
239
.github/workflows/manual-release.yml
vendored
Normal file
@@ -0,0 +1,239 @@
|
||||
name: Manual Release
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version:
|
||||
description: 'Version to release (e.g., 4.25.3)'
|
||||
required: true
|
||||
type: string
|
||||
target_commitish:
|
||||
description: 'Commit SHA or branch (leave empty for current HEAD)'
|
||||
required: false
|
||||
type: string
|
||||
release_notes:
|
||||
description: 'Release notes/changelog (leave empty to auto-generate from commits)'
|
||||
required: false
|
||||
type: string
|
||||
prerelease:
|
||||
description: 'Mark as prerelease'
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
validate-version:
|
||||
name: Validate and Update Package Versions
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.target_commitish || github.ref }}
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Check and Update Package Versions
|
||||
run: |
|
||||
EXPECTED_VERSION="${{ inputs.version }}"
|
||||
MISMATCHES_FOUND=false
|
||||
|
||||
PACKAGE_JSONS=(
|
||||
"package.json"
|
||||
"api/package.json"
|
||||
"web/package.json"
|
||||
"unraid-ui/package.json"
|
||||
"plugin/package.json"
|
||||
"packages/unraid-shared/package.json"
|
||||
"packages/unraid-api-plugin-health/package.json"
|
||||
"packages/unraid-api-plugin-generator/package.json"
|
||||
"packages/unraid-api-plugin-connect/package.json"
|
||||
)
|
||||
|
||||
echo "Checking package.json versions against expected version: ${EXPECTED_VERSION}"
|
||||
|
||||
for pkg in "${PACKAGE_JSONS[@]}"; do
|
||||
if [ -f "$pkg" ]; then
|
||||
CURRENT_VERSION=$(node -p "require('./$pkg').version")
|
||||
if [ "$CURRENT_VERSION" != "$EXPECTED_VERSION" ]; then
|
||||
echo "❌ Version mismatch in $pkg: $CURRENT_VERSION != $EXPECTED_VERSION"
|
||||
MISMATCHES_FOUND=true
|
||||
|
||||
# Detect indentation by checking the first property line
|
||||
INDENT_SPACES=$(head -10 "$pkg" | grep '^ *"' | head -1 | sed 's/".*//g' | wc -c)
|
||||
INDENT_SPACES=$((INDENT_SPACES - 1))
|
||||
|
||||
jq --indent "$INDENT_SPACES" --arg version "$EXPECTED_VERSION" '.version = $version' "$pkg" > "$pkg.tmp" && mv "$pkg.tmp" "$pkg"
|
||||
echo "✓ Updated $pkg to version $EXPECTED_VERSION"
|
||||
else
|
||||
echo "✓ $pkg version matches: $CURRENT_VERSION"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
if [ "$MISMATCHES_FOUND" = true ]; then
|
||||
echo ""
|
||||
echo "=========================================="
|
||||
echo "Version mismatches found!"
|
||||
echo "=========================================="
|
||||
echo ""
|
||||
|
||||
BRANCH_OR_SHA="${{ inputs.target_commitish || github.ref }}"
|
||||
|
||||
if git show-ref --verify --quiet "refs/heads/${BRANCH_OR_SHA}"; then
|
||||
echo "Creating commit with version updates and pushing to branch: ${BRANCH_OR_SHA}"
|
||||
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
BEFORE_SHA=$(git rev-parse HEAD)
|
||||
|
||||
git add ${PACKAGE_JSONS[@]}
|
||||
git commit -m "chore: update package versions to ${{ inputs.version }}"
|
||||
git push origin "HEAD:${BRANCH_OR_SHA}"
|
||||
|
||||
AFTER_SHA=$(git rev-parse HEAD)
|
||||
|
||||
echo ""
|
||||
echo "=========================================="
|
||||
echo "WORKFLOW MUST BE RE-RUN"
|
||||
echo "=========================================="
|
||||
echo ""
|
||||
echo "✓ Version updates committed and pushed successfully"
|
||||
echo ""
|
||||
echo "Previous SHA: ${BEFORE_SHA}"
|
||||
echo "New SHA: ${AFTER_SHA}"
|
||||
echo ""
|
||||
echo "⚠️ CRITICAL: A new commit was created, but github.sha is immutable."
|
||||
echo "⚠️ github.sha = ${BEFORE_SHA} (original workflow trigger)"
|
||||
echo "⚠️ The release tag must point to ${AFTER_SHA} (with version updates)"
|
||||
echo ""
|
||||
echo "Re-run this workflow to create the release with the correct commit."
|
||||
echo ""
|
||||
exit 1
|
||||
else
|
||||
echo "Target is a commit SHA, not a branch. Cannot push version updates."
|
||||
echo "Please update the package.json versions manually and re-run the workflow."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "✓ All package.json versions match the expected version: ${EXPECTED_VERSION}"
|
||||
|
||||
build-artifacts:
|
||||
name: Build All Artifacts
|
||||
needs:
|
||||
- validate-version
|
||||
uses: ./.github/workflows/build-artifacts.yml
|
||||
with:
|
||||
ref: ${{ inputs.target_commitish || github.ref }}
|
||||
version_override: ${{ inputs.version }}
|
||||
secrets:
|
||||
VITE_ACCOUNT: ${{ secrets.VITE_ACCOUNT }}
|
||||
VITE_CONNECT: ${{ secrets.VITE_CONNECT }}
|
||||
VITE_UNRAID_NET: ${{ secrets.VITE_UNRAID_NET }}
|
||||
VITE_CALLBACK_KEY: ${{ secrets.VITE_CALLBACK_KEY }}
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
generate-release-notes:
|
||||
name: Generate Release Notes
|
||||
needs:
|
||||
- build-artifacts
|
||||
uses: ./.github/workflows/generate-release-notes.yml
|
||||
with:
|
||||
version: ${{ inputs.version }}
|
||||
target_commitish: ${{ inputs.target_commitish || github.ref }}
|
||||
release_notes: ${{ inputs.release_notes }}
|
||||
secrets:
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
create-release:
|
||||
name: Create GitHub Release (Draft)
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- generate-release-notes
|
||||
outputs:
|
||||
tag_name: ${{ steps.create_release.outputs.tag_name }}
|
||||
release_notes: ${{ needs.generate-release-notes.outputs.release_notes }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.target_commitish || github.ref }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Create or Update Release as Draft
|
||||
id: create_release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
TAG_NAME="v${{ inputs.version }}"
|
||||
TARGET="${{ inputs.target_commitish || github.sha }}"
|
||||
|
||||
echo "tag_name=${TAG_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
if gh release view "${TAG_NAME}" > /dev/null 2>&1; then
|
||||
echo "Release ${TAG_NAME} already exists, updating as draft..."
|
||||
gh release edit "${TAG_NAME}" \
|
||||
--draft \
|
||||
--notes "${{ needs.generate-release-notes.outputs.release_notes }}" \
|
||||
${{ inputs.prerelease && '--prerelease' || '' }}
|
||||
else
|
||||
echo "Creating new draft release ${TAG_NAME}..."
|
||||
git tag "${TAG_NAME}" "${TARGET}" || true
|
||||
git push origin "${TAG_NAME}" || true
|
||||
|
||||
gh release create "${TAG_NAME}" \
|
||||
--draft \
|
||||
--title "${{ inputs.version }}" \
|
||||
--notes "${{ needs.generate-release-notes.outputs.release_notes }}" \
|
||||
--target "${TARGET}" \
|
||||
${{ inputs.prerelease && '--prerelease' || '' }}
|
||||
fi
|
||||
|
||||
build-plugin-production:
|
||||
name: Build and Deploy Production Plugin
|
||||
needs:
|
||||
- create-release
|
||||
- build-artifacts
|
||||
uses: ./.github/workflows/build-plugin.yml
|
||||
with:
|
||||
RELEASE_CREATED: 'true'
|
||||
RELEASE_TAG: ${{ needs.create-release.outputs.tag_name }}
|
||||
TAG: ""
|
||||
BUCKET_PATH: unraid-api
|
||||
BASE_URL: "https://stable.dl.unraid.net/unraid-api"
|
||||
BUILD_NUMBER: ${{ needs.build-artifacts.outputs.build_number }}
|
||||
ref: ${{ inputs.target_commitish || github.ref }}
|
||||
secrets:
|
||||
CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
|
||||
CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
CF_BUCKET_PREVIEW: ${{ secrets.CF_BUCKET_PREVIEW }}
|
||||
CF_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
publish-release:
|
||||
name: Publish Release
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- create-release
|
||||
- build-plugin-production
|
||||
steps:
|
||||
- name: Publish Release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
TAG_NAME="${{ needs.create-release.outputs.tag_name }}"
|
||||
echo "Publishing release ${TAG_NAME}..."
|
||||
gh release edit "${TAG_NAME}" --draft=false --repo ${{ github.repository }}
|
||||
|
||||
30
.github/workflows/publish-schema.yml
vendored
Normal file
30
.github/workflows/publish-schema.yml
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
name: Publish GraphQL Schema
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- 'api/generated-schema.graphql'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
publish-schema:
|
||||
name: Publish Schema to Apollo Studio
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Install Apollo Rover CLI
|
||||
run: |
|
||||
curl -sSL https://rover.apollo.dev/nix/latest | sh
|
||||
echo "$HOME/.rover/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Publish schema to Apollo Studio
|
||||
env:
|
||||
APOLLO_KEY: ${{ secrets.APOLLO_KEY }}
|
||||
run: |
|
||||
rover graph publish Unraid-API@current \
|
||||
--schema api/generated-schema.graphql
|
||||
|
||||
1
.github/workflows/release-production.yml
vendored
1
.github/workflows/release-production.yml
vendored
@@ -143,4 +143,3 @@ jobs:
|
||||
${{ steps.release-info.outputs.body }}
|
||||
embed-color: 16734296
|
||||
embed-footer-text: "Unraid API • Automated Release"
|
||||
embed-timestamp: true
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -123,3 +123,6 @@ api/dev/Unraid.net/myservers.cfg
|
||||
# local Mise settings
|
||||
.mise.toml
|
||||
|
||||
# Compiled test pages (generated from Nunjucks templates)
|
||||
web/public/test-pages/*.html
|
||||
|
||||
|
||||
@@ -1 +1 @@
|
||||
{".":"4.22.2"}
|
||||
{".":"4.27.2"}
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
@custom-variant dark (&:where(.dark, .dark *));
|
||||
|
||||
/* Utility defaults for web components (when we were using shadow DOM) */
|
||||
:host {
|
||||
:host,
|
||||
.unapi {
|
||||
--tw-divide-y-reverse: 0;
|
||||
--tw-border-style: solid;
|
||||
--tw-font-weight: initial;
|
||||
@@ -61,7 +62,7 @@
|
||||
}
|
||||
*/
|
||||
|
||||
body {
|
||||
.unapi {
|
||||
--color-alpha: #1c1b1b;
|
||||
--color-beta: #f2f2f2;
|
||||
--color-gamma: #999999;
|
||||
@@ -73,13 +74,14 @@ body {
|
||||
--ring-shadow: 0 0 var(--color-beta);
|
||||
}
|
||||
|
||||
button:not(:disabled),
|
||||
[role='button']:not(:disabled) {
|
||||
.unapi button:not(:disabled),
|
||||
.unapi [role='button']:not(:disabled) {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
/* Font size overrides for SSO button component */
|
||||
unraid-sso-button {
|
||||
.unapi unraid-sso-button,
|
||||
unraid-sso-button.unapi {
|
||||
--text-xs: 0.75rem;
|
||||
--text-sm: 0.875rem;
|
||||
--text-base: 1rem;
|
||||
@@ -93,4 +95,4 @@ unraid-sso-button {
|
||||
--text-7xl: 4.5rem;
|
||||
--text-8xl: 6rem;
|
||||
--text-9xl: 8rem;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,13 +5,7 @@
|
||||
*/
|
||||
|
||||
/* Default/White Theme */
|
||||
:root,
|
||||
.theme-white {
|
||||
--header-text-primary: #ffffff;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #1c1b1b;
|
||||
--header-gradient-start: rgba(28, 27, 27, 0);
|
||||
--header-gradient-end: rgba(28, 27, 27, 0.7);
|
||||
.Theme--white {
|
||||
--color-border: #383735;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #1c1b1b;
|
||||
@@ -20,13 +14,8 @@
|
||||
}
|
||||
|
||||
/* Black Theme */
|
||||
.theme-black,
|
||||
.theme-black.dark {
|
||||
--header-text-primary: #1c1b1b;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #f2f2f2;
|
||||
--header-gradient-start: rgba(242, 242, 242, 0);
|
||||
--header-gradient-end: rgba(242, 242, 242, 0.7);
|
||||
.Theme--black,
|
||||
.Theme--black.dark {
|
||||
--color-border: #e0e0e0;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #f2f2f2;
|
||||
@@ -35,12 +24,7 @@
|
||||
}
|
||||
|
||||
/* Gray Theme */
|
||||
.theme-gray {
|
||||
--header-text-primary: #ffffff;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #1c1b1b;
|
||||
--header-gradient-start: rgba(28, 27, 27, 0);
|
||||
--header-gradient-end: rgba(28, 27, 27, 0.7);
|
||||
.Theme--gray {
|
||||
--color-border: #383735;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #383735;
|
||||
@@ -49,12 +33,7 @@
|
||||
}
|
||||
|
||||
/* Azure Theme */
|
||||
.theme-azure {
|
||||
--header-text-primary: #1c1b1b;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #f2f2f2;
|
||||
--header-gradient-start: rgba(242, 242, 242, 0);
|
||||
--header-gradient-end: rgba(242, 242, 242, 0.7);
|
||||
.Theme--azure {
|
||||
--color-border: #5a8bb8;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #e7f2f8;
|
||||
@@ -66,27 +45,3 @@
|
||||
.dark {
|
||||
--color-border: #383735;
|
||||
}
|
||||
|
||||
/*
|
||||
* Dynamic color variables for user overrides from GraphQL
|
||||
* These are set via JavaScript and override the theme defaults
|
||||
* Using :root with class for higher specificity to override theme classes
|
||||
*/
|
||||
:root.has-custom-header-text {
|
||||
--header-text-primary: var(--custom-header-text-primary);
|
||||
--color-header-text-primary: var(--custom-header-text-primary);
|
||||
}
|
||||
|
||||
:root.has-custom-header-meta {
|
||||
--header-text-secondary: var(--custom-header-text-secondary);
|
||||
--color-header-text-secondary: var(--custom-header-text-secondary);
|
||||
}
|
||||
|
||||
:root.has-custom-header-bg {
|
||||
--header-background-color: var(--custom-header-background-color);
|
||||
--color-header-background: var(--custom-header-background-color);
|
||||
--header-gradient-start: var(--custom-header-gradient-start);
|
||||
--header-gradient-end: var(--custom-header-gradient-end);
|
||||
--color-header-gradient-start: var(--custom-header-gradient-start);
|
||||
--color-header-gradient-end: var(--custom-header-gradient-end);
|
||||
}
|
||||
@@ -32,3 +32,4 @@ CHOKIDAR_USEPOLLING=true
|
||||
LOG_TRANSPORT=console
|
||||
LOG_LEVEL=trace
|
||||
ENABLE_NEXT_DOCKER_RELEASE=true
|
||||
SKIP_CONNECT_PLUGIN_CHECK=true
|
||||
|
||||
@@ -42,7 +42,10 @@ export default tseslint.config(
|
||||
'ignorePackages',
|
||||
{
|
||||
js: 'always',
|
||||
ts: 'always',
|
||||
mjs: 'always',
|
||||
cjs: 'always',
|
||||
ts: 'never',
|
||||
tsx: 'never',
|
||||
},
|
||||
],
|
||||
'no-restricted-globals': [
|
||||
|
||||
135
api/CHANGELOG.md
135
api/CHANGELOG.md
@@ -1,5 +1,140 @@
|
||||
# Changelog
|
||||
|
||||
## [4.27.2](https://github.com/unraid/api/compare/v4.27.1...v4.27.2) (2025-11-21)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* issue with header flashing + issue with trial date ([64875ed](https://github.com/unraid/api/commit/64875edbba786a0d1ba0113c9e9a3d38594eafcc))
|
||||
|
||||
## [4.27.1](https://github.com/unraid/api/compare/v4.27.0...v4.27.1) (2025-11-21)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* missing translations for expiring trials ([#1800](https://github.com/unraid/api/issues/1800)) ([36c1049](https://github.com/unraid/api/commit/36c104915ece203a3cac9e1a13e0c325e536a839))
|
||||
* resolve header flash when background color is set ([#1796](https://github.com/unraid/api/issues/1796)) ([dc9a036](https://github.com/unraid/api/commit/dc9a036c73d8ba110029364e0d044dc24c7d0dfa))
|
||||
|
||||
## [4.27.0](https://github.com/unraid/api/compare/v4.26.2...v4.27.0) (2025-11-19)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* remove Unraid API log download functionality ([#1793](https://github.com/unraid/api/issues/1793)) ([e4a9b82](https://github.com/unraid/api/commit/e4a9b8291b049752a9ff59b17ff50cf464fe0535))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* auto-uninstallation of connect api plugin ([#1791](https://github.com/unraid/api/issues/1791)) ([e734043](https://github.com/unraid/api/commit/e7340431a58821ec1b4f5d1b452fba6613b01fa5))
|
||||
|
||||
## [4.26.2](https://github.com/unraid/api/compare/v4.26.1...v4.26.2) (2025-11-19)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **theme:** Missing header background color ([e2fdf6c](https://github.com/unraid/api/commit/e2fdf6cadbd816559b8c82546c2bc771a81ffa9e))
|
||||
|
||||
## [4.26.1](https://github.com/unraid/api/compare/v4.26.0...v4.26.1) (2025-11-18)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **theme:** update theme class naming and scoping logic ([b28ef1e](https://github.com/unraid/api/commit/b28ef1ea334cb4842f01fa992effa7024185c6c9))
|
||||
|
||||
## [4.26.0](https://github.com/unraid/api/compare/v4.25.3...v4.26.0) (2025-11-17)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add cpu power query & subscription ([#1745](https://github.com/unraid/api/issues/1745)) ([d7aca81](https://github.com/unraid/api/commit/d7aca81c60281bfa47fb9113929c1ead6ed3361b))
|
||||
* add schema publishing to apollo studio ([#1772](https://github.com/unraid/api/issues/1772)) ([7e13202](https://github.com/unraid/api/commit/7e13202aa1c02803095bb72bb1bcb2472716f53a))
|
||||
* add workflow_dispatch trigger to schema publishing workflow ([818e7ce](https://github.com/unraid/api/commit/818e7ce997059663e07efcf1dab706bf0d7fc9da))
|
||||
* apollo studio readme link ([c4cd0c6](https://github.com/unraid/api/commit/c4cd0c63520deec15d735255f38811f0360fe3a1))
|
||||
* **cli:** make `unraid-api plugins remove` scriptable ([#1774](https://github.com/unraid/api/issues/1774)) ([64eb9ce](https://github.com/unraid/api/commit/64eb9ce9b5d1ff4fb1f08d9963522c5d32221ba7))
|
||||
* use persisted theme css to fix flashes on header ([#1784](https://github.com/unraid/api/issues/1784)) ([854b403](https://github.com/unraid/api/commit/854b403fbd85220a3012af58ce033cf0b8418516))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **api:** decode html entities before parsing notifications ([#1768](https://github.com/unraid/api/issues/1768)) ([42406e7](https://github.com/unraid/api/commit/42406e795da1e5b95622951a467722dde72d51a8))
|
||||
* **connect:** disable api plugin if unraid plugin is absent ([#1773](https://github.com/unraid/api/issues/1773)) ([c264a18](https://github.com/unraid/api/commit/c264a1843cf115e8cc1add1ab4f12fdcc932405a))
|
||||
* detection of flash backup activation state ([#1769](https://github.com/unraid/api/issues/1769)) ([d18eaf2](https://github.com/unraid/api/commit/d18eaf2364e0c04992c52af38679ff0a0c570440))
|
||||
* re-add missing header gradient styles ([#1787](https://github.com/unraid/api/issues/1787)) ([f8a6785](https://github.com/unraid/api/commit/f8a6785e9c92f81acaef76ac5eb78a4a769e69da))
|
||||
* respect OS safe mode in plugin loader ([#1775](https://github.com/unraid/api/issues/1775)) ([92af3b6](https://github.com/unraid/api/commit/92af3b61156cabae70368cf5222a2f7ac5b4d083))
|
||||
|
||||
## [4.25.3](https://github.com/unraid/unraid-api/compare/v4.25.2...v4.25.3) (2025-10-22)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* flaky watch on boot drive's dynamix config ([ec7aa06](https://github.com/unraid/unraid-api/commit/ec7aa06d4a5fb1f0e84420266b0b0d7ee09a3663))
|
||||
|
||||
## [4.25.2](https://github.com/unraid/api/compare/v4.25.1...v4.25.2) (2025-09-30)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* enhance activation code modal visibility logic ([#1733](https://github.com/unraid/api/issues/1733)) ([e57ec00](https://github.com/unraid/api/commit/e57ec00627e54ce76d903fd0fa8686ad02b393f3))
|
||||
|
||||
## [4.25.1](https://github.com/unraid/api/compare/v4.25.0...v4.25.1) (2025-09-30)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add cache busting to web component extractor ([#1731](https://github.com/unraid/api/issues/1731)) ([0d165a6](https://github.com/unraid/api/commit/0d165a608740505bdc505dcf69fb615225969741))
|
||||
* Connect won't appear within Apps - Previous Apps ([#1727](https://github.com/unraid/api/issues/1727)) ([d73953f](https://github.com/unraid/api/commit/d73953f8ff3d7425c0aed32d16236ededfd948e1))
|
||||
|
||||
## [4.25.0](https://github.com/unraid/api/compare/v4.24.1...v4.25.0) (2025-09-26)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add Tailwind scoping plugin and integrate into Vite config ([#1722](https://github.com/unraid/api/issues/1722)) ([b7afaf4](https://github.com/unraid/api/commit/b7afaf463243b073e1ab1083961a16a12ac6c4a3))
|
||||
* notification filter controls pill buttons ([#1718](https://github.com/unraid/api/issues/1718)) ([661865f](https://github.com/unraid/api/commit/661865f97611cf802f239fde8232f3109281dde6))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* enable auth guard for nested fields - thanks [@ingel81](https://github.com/ingel81) ([7bdeca8](https://github.com/unraid/api/commit/7bdeca8338a3901f15fde06fd7aede3b0c16e087))
|
||||
* enhance user context validation in auth module ([#1726](https://github.com/unraid/api/issues/1726)) ([cd5eff1](https://github.com/unraid/api/commit/cd5eff11bcb4398581472966cb7ec124eac7ad0a))
|
||||
|
||||
## [4.24.1](https://github.com/unraid/api/compare/v4.24.0...v4.24.1) (2025-09-23)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* cleanup leftover removed packages on upgrade ([#1719](https://github.com/unraid/api/issues/1719)) ([9972a5f](https://github.com/unraid/api/commit/9972a5f178f9a251e6c129d85c5f11cfd25e6281))
|
||||
* enhance version comparison logic in installation script ([d9c561b](https://github.com/unraid/api/commit/d9c561bfebed0c553fe4bfa26b088ae71ca59755))
|
||||
* issue with incorrect permissions on viewer / other roles ([378cdb7](https://github.com/unraid/api/commit/378cdb7f102f63128dd236c13f1a3745902d5a2c))
|
||||
|
||||
## [4.24.0](https://github.com/unraid/api/compare/v4.23.1...v4.24.0) (2025-09-18)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* improve dom content loading by being more efficient about component mounting ([#1716](https://github.com/unraid/api/issues/1716)) ([d8b166e](https://github.com/unraid/api/commit/d8b166e4b6a718e07783d9c8ac8393b50ec89ae3))
|
||||
|
||||
## [4.23.1](https://github.com/unraid/api/compare/v4.23.0...v4.23.1) (2025-09-17)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* cleanup ini parser logic with better fallbacks ([#1713](https://github.com/unraid/api/issues/1713)) ([1691362](https://github.com/unraid/api/commit/16913627de9497a5d2f71edb710cec6e2eb9f890))
|
||||
|
||||
## [4.23.0](https://github.com/unraid/api/compare/v4.22.2...v4.23.0) (2025-09-16)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add unraid api status manager ([#1708](https://github.com/unraid/api/issues/1708)) ([1d9ce0a](https://github.com/unraid/api/commit/1d9ce0aa3d067726c2c880929408c68f53e13e0d))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **logging:** remove colorized logs ([#1705](https://github.com/unraid/api/issues/1705)) ([1d2c670](https://github.com/unraid/api/commit/1d2c6701ce56b1d40afdb776065295e9273d08e9))
|
||||
* no sizeRootFs unless queried ([#1710](https://github.com/unraid/api/issues/1710)) ([9714b21](https://github.com/unraid/api/commit/9714b21c5c07160b92a11512e8b703908adb0620))
|
||||
* use virtual-modal-container ([#1709](https://github.com/unraid/api/issues/1709)) ([44b4d77](https://github.com/unraid/api/commit/44b4d77d803aa724968307cfa463f7c440791a10))
|
||||
|
||||
## [4.22.2](https://github.com/unraid/api/compare/v4.22.1...v4.22.2) (2025-09-15)
|
||||
|
||||
|
||||
|
||||
@@ -71,6 +71,10 @@ unraid-api report -vv
|
||||
|
||||
If you found this file you're likely a developer. If you'd like to know more about the API and when it's available please join [our discord](https://discord.unraid.net/).
|
||||
|
||||
## Internationalization
|
||||
|
||||
- Run `pnpm --filter @unraid/api i18n:extract` to scan the Nest.js source for translation helper usages and update `src/i18n/en.json` with any new keys. The extractor keeps existing translations intact and appends new keys with their English source text.
|
||||
|
||||
## License
|
||||
|
||||
Copyright Lime Technology Inc. All rights reserved.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"version": "4.22.2",
|
||||
"version": "4.25.3",
|
||||
"extraOrigins": [],
|
||||
"sandbox": true,
|
||||
"ssoSubIds": [],
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
timestamp=1730937600
|
||||
event=Hashtag Test
|
||||
subject=Warning [UNRAID] - #1 OS is cooking
|
||||
description=Disk 1 temperature has reached #epic # levels of proportion
|
||||
importance=warning
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
timestamp=1730937600
|
||||
event=Temperature Test
|
||||
subject=Warning [UNRAID] - High disk temperature detected: 45 °C
|
||||
description=Disk 1 temperature has reached 45 °C (threshold: 40 °C)<br><br>Current temperatures:<br>Parity - 32 °C [OK]<br>Disk 1 - 45 °C [WARNING]<br>Disk 2 - 38 °C [OK]<br>Cache - 28 °C [OK]<br><br>Please check cooling system.
|
||||
importance=warning
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
"label": "Unraid API",
|
||||
"position": 4
|
||||
}
|
||||
@@ -1,100 +0,0 @@
|
||||
# API Key Authorization Flow
|
||||
|
||||
This document describes the self-service API key creation flow for third-party applications.
|
||||
|
||||
## Overview
|
||||
|
||||
Applications can request API access to an Unraid server by redirecting users to a special authorization page where users can review requested permissions and create an API key with one click.
|
||||
|
||||
## Flow
|
||||
|
||||
1. **Application initiates request**: The app redirects the user to:
|
||||
|
||||
```
|
||||
https://[unraid-server]/ApiKeyAuthorize?name=MyApp&scopes=docker:read,vm:*&redirect_uri=https://myapp.com/callback&state=abc123
|
||||
```
|
||||
|
||||
2. **User authentication**: If not already logged in, the user is redirected to login first (standard Unraid auth)
|
||||
|
||||
3. **Consent screen**: User sees:
|
||||
- Application name and description
|
||||
- Requested permissions (with checkboxes to approve/deny specific scopes)
|
||||
- API key name field (pre-filled)
|
||||
- Authorize & Cancel buttons
|
||||
|
||||
4. **API key creation**: Upon authorization:
|
||||
- API key is created with approved scopes
|
||||
- Key is displayed to the user
|
||||
- If `redirect_uri` is provided, user is redirected back with the key
|
||||
|
||||
5. **Callback**: App receives the API key:
|
||||
```
|
||||
https://myapp.com/callback?api_key=xxx&state=abc123
|
||||
```
|
||||
|
||||
## Query Parameters
|
||||
|
||||
- `name` (required): Name of the requesting application
|
||||
- `description` (optional): Description of the application
|
||||
- `scopes` (required): Comma-separated list of requested scopes
|
||||
- `redirect_uri` (optional): URL to redirect after authorization
|
||||
- `state` (optional): Opaque value for maintaining state
|
||||
|
||||
## Scope Format
|
||||
|
||||
Scopes follow the pattern: `resource:action`
|
||||
|
||||
### Examples:
|
||||
|
||||
- `docker:read` - Read access to Docker
|
||||
- `vm:*` - Full access to VMs
|
||||
- `system:update` - Update access to system
|
||||
- `role:viewer` - Viewer role access
|
||||
- `role:admin` - Admin role access
|
||||
|
||||
### Available Resources:
|
||||
|
||||
- `docker`, `vm`, `system`, `share`, `user`, `network`, `disk`, etc.
|
||||
|
||||
### Available Actions:
|
||||
|
||||
- `create`, `read`, `update`, `delete` or `*` for all
|
||||
|
||||
## Security Considerations
|
||||
|
||||
1. **HTTPS required**: Redirect URIs must use HTTPS (except localhost for development)
|
||||
2. **User consent**: Users explicitly approve each permission
|
||||
3. **Session-based**: Uses existing Unraid authentication session
|
||||
4. **One-time display**: API keys are shown once and must be saved securely
|
||||
|
||||
## Example Integration
|
||||
|
||||
```javascript
|
||||
// JavaScript example
|
||||
const unraidServer = 'tower.local';
|
||||
const appName = 'My Docker Manager';
|
||||
const scopes = 'docker:*,system:read';
|
||||
const redirectUri = 'https://myapp.com/unraid/callback';
|
||||
const state = generateRandomState();
|
||||
|
||||
// Store state for verification
|
||||
sessionStorage.setItem('oauth_state', state);
|
||||
|
||||
// Redirect user to authorization page
|
||||
window.location.href =
|
||||
`https://${unraidServer}/ApiKeyAuthorize?` +
|
||||
`name=${encodeURIComponent(appName)}&` +
|
||||
`scopes=${encodeURIComponent(scopes)}&` +
|
||||
`redirect_uri=${encodeURIComponent(redirectUri)}&` +
|
||||
`state=${encodeURIComponent(state)}`;
|
||||
|
||||
// Handle callback
|
||||
const urlParams = new URLSearchParams(window.location.search);
|
||||
const apiKey = urlParams.get('api_key');
|
||||
const returnedState = urlParams.get('state');
|
||||
|
||||
if (returnedState === sessionStorage.getItem('oauth_state')) {
|
||||
// Save API key securely
|
||||
saveApiKey(apiKey);
|
||||
}
|
||||
```
|
||||
@@ -1,210 +0,0 @@
|
||||
---
|
||||
title: CLI Reference
|
||||
description: Complete reference for all Unraid API CLI commands
|
||||
sidebar_position: 4
|
||||
---
|
||||
|
||||
# CLI Commands
|
||||
|
||||
:::info[Command Structure]
|
||||
All commands follow the pattern: `unraid-api <command> [options]`
|
||||
:::
|
||||
|
||||
## 🚀 Service Management
|
||||
|
||||
### Start
|
||||
|
||||
```bash
|
||||
unraid-api start [--log-level <level>]
|
||||
```
|
||||
|
||||
Starts the Unraid API service.
|
||||
|
||||
Options:
|
||||
|
||||
- `--log-level`: Set logging level (trace|debug|info|warn|error|fatal)
|
||||
|
||||
Alternative: You can also set the log level using the `LOG_LEVEL` environment variable:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=trace unraid-api start
|
||||
```
|
||||
|
||||
### Stop
|
||||
|
||||
```bash
|
||||
unraid-api stop [--delete]
|
||||
```
|
||||
|
||||
Stops the Unraid API service.
|
||||
|
||||
- `--delete`: Optional. Delete the PM2 home directory
|
||||
|
||||
### Restart
|
||||
|
||||
```bash
|
||||
unraid-api restart [--log-level <level>]
|
||||
```
|
||||
|
||||
Restarts the Unraid API service.
|
||||
|
||||
Options:
|
||||
|
||||
- `--log-level`: Set logging level (trace|debug|info|warn|error|fatal)
|
||||
|
||||
Alternative: You can also set the log level using the `LOG_LEVEL` environment variable:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=trace unraid-api restart
|
||||
```
|
||||
|
||||
### Logs
|
||||
|
||||
```bash
|
||||
unraid-api logs [-l <lines>]
|
||||
```
|
||||
|
||||
View the API logs.
|
||||
|
||||
- `-l, --lines`: Optional. Number of lines to tail (default: 100)
|
||||
|
||||
## ⚙️ Configuration Commands
|
||||
|
||||
### Config
|
||||
|
||||
```bash
|
||||
unraid-api config
|
||||
```
|
||||
|
||||
Displays current configuration values.
|
||||
|
||||
### Switch Environment
|
||||
|
||||
```bash
|
||||
unraid-api switch-env [-e <environment>]
|
||||
```
|
||||
|
||||
Switch between production and staging environments.
|
||||
|
||||
- `-e, --environment`: Optional. Target environment (production|staging)
|
||||
|
||||
### Developer Mode
|
||||
|
||||
:::tip Web GUI Management
|
||||
You can also manage developer options through the web interface at **Settings** → **Management Access** → **Developer Options**
|
||||
:::
|
||||
|
||||
```bash
|
||||
unraid-api developer # Interactive prompt for tools
|
||||
unraid-api developer --sandbox true # Enable GraphQL sandbox
|
||||
unraid-api developer --sandbox false # Disable GraphQL sandbox
|
||||
unraid-api developer --enable-modal # Enable modal testing tool
|
||||
unraid-api developer --disable-modal # Disable modal testing tool
|
||||
```
|
||||
|
||||
Configure developer features for the API:
|
||||
|
||||
- **GraphQL Sandbox**: Enable/disable Apollo GraphQL sandbox at `/graphql`
|
||||
- **Modal Testing Tool**: Enable/disable UI modal testing in the Unraid menu
|
||||
|
||||
## API Key Management
|
||||
|
||||
:::tip Web GUI Management
|
||||
You can also manage API keys through the web interface at **Settings** → **Management Access** → **API Keys**
|
||||
:::
|
||||
|
||||
### API Key Commands
|
||||
|
||||
```bash
|
||||
unraid-api apikey [options]
|
||||
```
|
||||
|
||||
Create and manage API keys via CLI.
|
||||
|
||||
Options:
|
||||
|
||||
- `--name <name>`: Name of the key
|
||||
- `--create`: Create a new key
|
||||
- `-r, --roles <roles>`: Comma-separated list of roles
|
||||
- `-p, --permissions <permissions>`: Comma-separated list of permissions
|
||||
- `-d, --description <description>`: Description for the key
|
||||
|
||||
## SSO (Single Sign-On) Management
|
||||
|
||||
:::info OIDC Configuration
|
||||
For OIDC/SSO provider configuration, see the web interface at **Settings** → **Management Access** → **API** → **OIDC** or refer to the [OIDC Provider Setup](./oidc-provider-setup.md) guide.
|
||||
:::
|
||||
|
||||
### SSO Base Command
|
||||
|
||||
```bash
|
||||
unraid-api sso
|
||||
```
|
||||
|
||||
#### Add SSO User
|
||||
|
||||
```bash
|
||||
unraid-api sso add-user
|
||||
# or
|
||||
unraid-api sso add
|
||||
# or
|
||||
unraid-api sso a
|
||||
```
|
||||
|
||||
Add a new user for SSO authentication.
|
||||
|
||||
#### Remove SSO User
|
||||
|
||||
```bash
|
||||
unraid-api sso remove-user
|
||||
# or
|
||||
unraid-api sso remove
|
||||
# or
|
||||
unraid-api sso r
|
||||
```
|
||||
|
||||
Remove a user (or all users) from SSO.
|
||||
|
||||
#### List SSO Users
|
||||
|
||||
```bash
|
||||
unraid-api sso list-users
|
||||
# or
|
||||
unraid-api sso list
|
||||
# or
|
||||
unraid-api sso l
|
||||
```
|
||||
|
||||
List all configured SSO users.
|
||||
|
||||
#### Validate SSO Token
|
||||
|
||||
```bash
|
||||
unraid-api sso validate-token <token>
|
||||
# or
|
||||
unraid-api sso validate
|
||||
# or
|
||||
unraid-api sso v
|
||||
```
|
||||
|
||||
Validates an SSO token and returns its status.
|
||||
|
||||
## Report Generation
|
||||
|
||||
### Generate Report
|
||||
|
||||
```bash
|
||||
unraid-api report [-r] [-j]
|
||||
```
|
||||
|
||||
Generate a system report.
|
||||
|
||||
- `-r, --raw`: Display raw command output
|
||||
- `-j, --json`: Display output in JSON format
|
||||
|
||||
## Notes
|
||||
|
||||
1. Most commands require appropriate permissions to modify system state
|
||||
2. Some commands require the API to be running or stopped
|
||||
3. Store API keys securely as they provide system access
|
||||
4. SSO configuration changes may require a service restart
|
||||
@@ -1,255 +0,0 @@
|
||||
---
|
||||
title: Using the Unraid API
|
||||
description: Learn how to interact with your Unraid server through the GraphQL API
|
||||
sidebar_position: 2
|
||||
---
|
||||
|
||||
# Using the Unraid API
|
||||
|
||||
:::tip[Quick Start]
|
||||
The Unraid API provides a powerful GraphQL interface for managing your server. This guide covers authentication, common queries, and best practices.
|
||||
:::
|
||||
|
||||
The Unraid API provides a GraphQL interface that allows you to interact with your Unraid server. This guide will help you get started with exploring and using the API.
|
||||
|
||||
## 🎮 Enabling the GraphQL Sandbox
|
||||
|
||||
### Web GUI Method (Recommended)
|
||||
|
||||
:::info[Preferred Method]
|
||||
Using the Web GUI is the easiest way to enable the GraphQL sandbox.
|
||||
:::
|
||||
|
||||
1. Navigate to **Settings** → **Management Access** → **Developer Options**
|
||||
2. Enable the **GraphQL Sandbox** toggle
|
||||
3. Access the GraphQL playground by navigating to:
|
||||
|
||||
```txt
|
||||
http://YOUR_SERVER_IP/graphql
|
||||
```
|
||||
|
||||
### CLI Method
|
||||
|
||||
Alternatively, you can enable developer mode using the CLI:
|
||||
|
||||
```bash
|
||||
unraid-api developer --sandbox true
|
||||
```
|
||||
|
||||
Or use the interactive mode:
|
||||
|
||||
```bash
|
||||
unraid-api developer
|
||||
```
|
||||
|
||||
## 🔑 Authentication
|
||||
|
||||
:::warning[Required for Most Operations]
|
||||
Most queries and mutations require authentication. Always include appropriate credentials in your requests.
|
||||
:::
|
||||
|
||||
You can authenticate using:
|
||||
|
||||
1. **API Keys** - For programmatic access
|
||||
2. **Cookies** - Automatic when signed into the WebGUI
|
||||
3. **SSO/OIDC** - When configured with external providers
|
||||
|
||||
### Managing API Keys
|
||||
|
||||
<tabs>
|
||||
<tabItem value="gui" label="Web GUI (Recommended)" default>
|
||||
|
||||
Navigate to **Settings** → **Management Access** → **API Keys** in your Unraid web interface to:
|
||||
|
||||
- View existing API keys
|
||||
- Create new API keys
|
||||
- Manage permissions and roles
|
||||
- Revoke or regenerate keys
|
||||
|
||||
</tabItem>
|
||||
<tabItem value="cli" label="CLI Method">
|
||||
|
||||
You can also use the CLI to create an API key:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create
|
||||
```
|
||||
|
||||
Follow the prompts to set:
|
||||
|
||||
- Name
|
||||
- Description
|
||||
- Roles
|
||||
- Permissions
|
||||
|
||||
</tabItem>
|
||||
</tabs>
|
||||
|
||||
### Using API Keys
|
||||
|
||||
The generated API key should be included in your GraphQL requests as a header:
|
||||
|
||||
```json
|
||||
{
|
||||
"x-api-key": "YOUR_API_KEY"
|
||||
}
|
||||
```
|
||||
|
||||
## 📊 Available Schemas
|
||||
|
||||
The API provides access to various aspects of your Unraid server:
|
||||
|
||||
### System Information
|
||||
|
||||
- Query system details including CPU, memory, and OS information
|
||||
- Monitor system status and health
|
||||
- Access baseboard and hardware information
|
||||
|
||||
### Array Management
|
||||
|
||||
- Query array status and configuration
|
||||
- Manage array operations (start/stop)
|
||||
- Monitor disk status and health
|
||||
- Perform parity checks
|
||||
|
||||
### Docker Management
|
||||
|
||||
- List and manage Docker containers
|
||||
- Monitor container status
|
||||
- Manage Docker networks
|
||||
|
||||
### Remote Access
|
||||
|
||||
- Configure and manage remote access settings
|
||||
- Handle SSO configuration
|
||||
- Manage allowed origins
|
||||
|
||||
### 💻 Example Queries
|
||||
|
||||
#### Check System Status
|
||||
|
||||
```graphql
|
||||
query {
|
||||
info {
|
||||
os {
|
||||
platform
|
||||
distro
|
||||
release
|
||||
uptime
|
||||
}
|
||||
cpu {
|
||||
manufacturer
|
||||
brand
|
||||
cores
|
||||
threads
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### Monitor Array Status
|
||||
|
||||
```graphql
|
||||
query {
|
||||
array {
|
||||
state
|
||||
capacity {
|
||||
disks {
|
||||
free
|
||||
used
|
||||
total
|
||||
}
|
||||
}
|
||||
disks {
|
||||
name
|
||||
size
|
||||
status
|
||||
temp
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### List Docker Containers
|
||||
|
||||
```graphql
|
||||
query {
|
||||
dockerContainers {
|
||||
id
|
||||
names
|
||||
state
|
||||
status
|
||||
autoStart
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## 🏗️ Schema Types
|
||||
|
||||
The API includes several core types:
|
||||
|
||||
### Base Types
|
||||
|
||||
- `Node`: Interface for objects with unique IDs - please see [Object Identification](https://graphql.org/learn/global-object-identification/)
|
||||
- `JSON`: For complex JSON data
|
||||
- `DateTime`: For timestamp values
|
||||
- `Long`: For 64-bit integers
|
||||
|
||||
### Resource Types
|
||||
|
||||
- `Array`: Array and disk management
|
||||
- `Docker`: Container and network management
|
||||
- `Info`: System information
|
||||
- `Config`: Server configuration
|
||||
- `Connect`: Remote access settings
|
||||
|
||||
### Role-Based Access
|
||||
|
||||
Available roles:
|
||||
|
||||
- `admin`: Full access
|
||||
- `connect`: Remote access features
|
||||
- `guest`: Limited read access
|
||||
|
||||
## ✨ Best Practices
|
||||
|
||||
:::tip[Pro Tips]
|
||||
1. Use the Apollo Sandbox to explore the schema and test queries
|
||||
2. Start with small queries and gradually add fields as needed
|
||||
3. Monitor your query complexity to maintain performance
|
||||
4. Use appropriate roles and permissions for your API keys
|
||||
5. Keep your API keys secure and rotate them periodically
|
||||
:::
|
||||
|
||||
## ⏱️ Rate Limiting
|
||||
|
||||
:::caution[Rate Limits]
|
||||
The API implements rate limiting to prevent abuse. Ensure your applications handle rate limit responses appropriately.
|
||||
:::
|
||||
|
||||
## 🚨 Error Handling
|
||||
|
||||
The API returns standard GraphQL errors in the following format:
|
||||
|
||||
```json
|
||||
{
|
||||
"errors": [
|
||||
{
|
||||
"message": "Error description",
|
||||
"locations": [...],
|
||||
"path": [...]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## 📚 Additional Resources
|
||||
|
||||
:::info[Learn More]
|
||||
- Use the Apollo Sandbox's schema explorer to browse all available types and fields
|
||||
- Check the documentation tab in Apollo Sandbox for detailed field descriptions
|
||||
- Monitor the API's health using `unraid-api status`
|
||||
- Generate reports using `unraid-api report` for troubleshooting
|
||||
|
||||
For more information about specific commands and configuration options, refer to the [CLI documentation](/cli) or run `unraid-api --help`.
|
||||
:::
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 101 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 96 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 85 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 128 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 75 KiB |
@@ -1,94 +0,0 @@
|
||||
---
|
||||
title: Welcome to Unraid API
|
||||
description: The official GraphQL API for Unraid Server management and automation
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
# Welcome to Unraid API
|
||||
|
||||
:::tip[What's New]
|
||||
Starting with Unraid OS v7.2, the API comes built into the operating system - no plugin installation required!
|
||||
:::
|
||||
|
||||
The Unraid API provides a GraphQL interface for programmatic interaction with your Unraid server. It enables automation, monitoring, and integration capabilities.
|
||||
|
||||
## 📦 Availability
|
||||
|
||||
### ✨ Native Integration (Unraid OS v7.2+)
|
||||
|
||||
Starting with Unraid OS v7.2, the API is integrated directly into the operating system:
|
||||
|
||||
- No plugin installation required
|
||||
- Automatically available on system startup
|
||||
- Deep system integration
|
||||
- Access through **Settings** → **Management Access** → **API**
|
||||
|
||||
### 🔌 Plugin Installation (Pre-7.2 and Advanced Users)
|
||||
|
||||
For Unraid versions prior to v7.2 or to access newer API features:
|
||||
|
||||
1. Install the Unraid Connect Plugin from Community Apps
|
||||
2. [Configure the plugin](./how-to-use-the-api.md#enabling-the-graphql-sandbox)
|
||||
3. Access API functionality through the [GraphQL Sandbox](./how-to-use-the-api.md)
|
||||
|
||||
:::info Important Notes
|
||||
- The Unraid Connect plugin provides the API for pre-7.2 versions
|
||||
- You do NOT need to sign in to Unraid Connect to use the API locally
|
||||
- Installing the plugin on 7.2+ gives you access to newer API features before they're included in OS releases
|
||||
:::
|
||||
|
||||
## 📚 Documentation Sections
|
||||
|
||||
<cards>
|
||||
<card title="CLI Commands" icon="terminal" href="./cli">
|
||||
Complete reference for all CLI commands
|
||||
</card>
|
||||
<card title="Using the API" icon="code" href="./how-to-use-the-api">
|
||||
Learn how to interact with the GraphQL API
|
||||
</card>
|
||||
<card title="OIDC Setup" icon="shield" href="./oidc-provider-setup">
|
||||
Configure SSO authentication providers
|
||||
</card>
|
||||
<card title="Upcoming Features" icon="rocket" href="./upcoming-features">
|
||||
See what's coming next
|
||||
</card>
|
||||
</cards>
|
||||
|
||||
|
||||
## 🌟 Key Features
|
||||
|
||||
:::info[Core Capabilities]
|
||||
The API provides:
|
||||
|
||||
- **GraphQL Interface**: Modern, flexible API with strong typing
|
||||
- **Authentication**: Multiple methods including API keys, session cookies, and SSO/OIDC
|
||||
- **Comprehensive Coverage**: Access to system information, array management, and Docker operations
|
||||
- **Developer Tools**: Built-in GraphQL sandbox configurable via web interface or CLI
|
||||
- **Role-Based Access**: Granular permission control
|
||||
- **Web Management**: Manage API keys and settings through the web interface
|
||||
:::
|
||||
|
||||
## 🚀 Get Started
|
||||
|
||||
<tabs>
|
||||
<tabItem value="v72" label="Unraid OS v7.2+" default>
|
||||
|
||||
1. The API is already installed and running
|
||||
2. Access settings at **Settings** → **Management Access** → **API**
|
||||
3. Enable the GraphQL Sandbox for development
|
||||
4. Create your first API key
|
||||
5. Start making GraphQL queries!
|
||||
|
||||
</tabItem>
|
||||
<tabItem value="older" label="Pre-7.2 Versions">
|
||||
|
||||
1. Install the Unraid Connect plugin from Community Apps
|
||||
2. No Unraid Connect login required for local API access
|
||||
3. Configure the plugin settings
|
||||
4. Enable the GraphQL Sandbox
|
||||
5. Start exploring the API!
|
||||
|
||||
</tabItem>
|
||||
</tabs>
|
||||
|
||||
For detailed usage instructions, see the [CLI Commands](./cli) reference.
|
||||
1
api/docs/public/moved-to-docs-repo.md
Normal file
1
api/docs/public/moved-to-docs-repo.md
Normal file
@@ -0,0 +1 @@
|
||||
# All content here has been permanently moved to [Unraid Docs](https://github.com/unraid/docs)
|
||||
@@ -1,420 +0,0 @@
|
||||
---
|
||||
title: OIDC Provider Setup
|
||||
description: Configure OIDC (OpenID Connect) providers for SSO authentication in Unraid API
|
||||
sidebar_position: 3
|
||||
---
|
||||
|
||||
# OIDC Provider Setup
|
||||
|
||||
:::info[What is OIDC?]
|
||||
OpenID Connect (OIDC) is an authentication protocol that allows users to sign in using their existing accounts from providers like Google, Microsoft, or your corporate identity provider. It enables Single Sign-On (SSO) for seamless and secure authentication.
|
||||
:::
|
||||
|
||||
This guide walks you through configuring OIDC (OpenID Connect) providers for SSO authentication in the Unraid API using the web interface.
|
||||
|
||||
## 🚀 Quick Start
|
||||
|
||||
<details open>
|
||||
<summary><strong>Getting to OIDC Settings</strong></summary>
|
||||
|
||||
1. Navigate to your Unraid server's web interface
|
||||
2. Go to **Settings** → **Management Access** → **API** → **OIDC**
|
||||
3. You'll see tabs for different providers - click the **+** button to add a new provider
|
||||
|
||||
</details>
|
||||
|
||||
### OIDC Providers Interface Overview
|
||||
|
||||

|
||||
*Login page showing traditional login form with SSO options - "Login With Unraid.net" and "Sign in with Google" buttons*
|
||||
|
||||
The interface includes:
|
||||
|
||||
- **Provider tabs**: Each configured provider (Unraid.net, Google, etc.) appears as a tab
|
||||
- **Add Provider button**: Click the **+** button to add new providers
|
||||
- **Authorization Mode dropdown**: Toggle between "simple" and "advanced" modes
|
||||
- **Simple Authorization section**: Configure allowed email domains and specific addresses
|
||||
- **Add Item buttons**: Click to add multiple authorization rules
|
||||
|
||||
## Understanding Authorization Modes
|
||||
|
||||
The interface provides two authorization modes:
|
||||
|
||||
### Simple Mode (Recommended)
|
||||
|
||||
Simple mode is the easiest way to configure authorization. You can:
|
||||
|
||||
- Allow specific email domains (e.g., @company.com)
|
||||
- Allow specific email addresses
|
||||
- Configure who can access your Unraid server with minimal setup
|
||||
|
||||
**When to use Simple Mode:**
|
||||
|
||||
- You want to allow all users from your company domain
|
||||
- You have a small list of specific users
|
||||
- You're new to OIDC configuration
|
||||
|
||||
<details>
|
||||
<summary><strong>Advanced Mode</strong></summary>
|
||||
|
||||
Advanced mode provides granular control using claim-based rules. You can:
|
||||
|
||||
- Create complex authorization rules based on JWT claims
|
||||
- Use operators like equals, contains, endsWith, startsWith
|
||||
- Combine multiple conditions with OR/AND logic
|
||||
- Choose whether ANY rule must pass (OR mode) or ALL rules must pass (AND mode)
|
||||
|
||||
**When to use Advanced Mode:**
|
||||
|
||||
- You need to check group memberships
|
||||
- You want to verify multiple claims (e.g., email domain AND verified status)
|
||||
- You have complex authorization requirements
|
||||
- You need fine-grained control over how rules are evaluated
|
||||
|
||||
</details>
|
||||
|
||||
## Authorization Rules
|
||||
|
||||

|
||||
*Advanced authorization rules showing JWT claim configuration with email endsWith operator for domain-based access control*
|
||||
|
||||
### Simple Mode Examples
|
||||
|
||||
#### Allow Company Domain
|
||||
|
||||
In Simple Authorization:
|
||||
|
||||
- **Allowed Email Domains**: Enter `company.com`
|
||||
- This allows anyone with @company.com email
|
||||
|
||||
#### Allow Specific Users
|
||||
|
||||
- **Specific Email Addresses**: Add individual emails
|
||||
- Click **Add Item** to add multiple addresses
|
||||
|
||||
<details>
|
||||
<summary><strong>Advanced Mode Examples</strong></summary>
|
||||
|
||||
#### Authorization Rule Mode
|
||||
|
||||
When using multiple rules, you can choose how they're evaluated:
|
||||
|
||||
- **OR Mode** (default): User is authorized if ANY rule passes
|
||||
- **AND Mode**: User is authorized only if ALL rules pass
|
||||
|
||||
#### Email Domain with Verification (AND Mode)
|
||||
|
||||
To require both email domain AND verification:
|
||||
|
||||
1. Set **Authorization Rule Mode** to `AND`
|
||||
2. Add two rules:
|
||||
- Rule 1:
|
||||
- **Claim**: `email`
|
||||
- **Operator**: `endsWith`
|
||||
- **Value**: `@company.com`
|
||||
- Rule 2:
|
||||
- **Claim**: `email_verified`
|
||||
- **Operator**: `equals`
|
||||
- **Value**: `true`
|
||||
|
||||
This ensures users must have both a company email AND a verified email address.
|
||||
|
||||
#### Group-Based Access (OR Mode)
|
||||
|
||||
To allow access to multiple groups:
|
||||
|
||||
1. Set **Authorization Rule Mode** to `OR` (default)
|
||||
2. Add rules for each group:
|
||||
- **Claim**: `groups`
|
||||
- **Operator**: `contains`
|
||||
- **Value**: `admins`
|
||||
|
||||
Or add another rule:
|
||||
- **Claim**: `groups`
|
||||
- **Operator**: `contains`
|
||||
- **Value**: `developers`
|
||||
|
||||
Users in either `admins` OR `developers` group will be authorized.
|
||||
|
||||
#### Multiple Domains
|
||||
|
||||
- **Claim**: `email`
|
||||
- **Operator**: `endsWith`
|
||||
- **Values**: Add multiple domains (e.g., `company.com`, `subsidiary.com`)
|
||||
|
||||
#### Complex Authorization (AND Mode)
|
||||
|
||||
For strict security requiring multiple conditions:
|
||||
|
||||
1. Set **Authorization Rule Mode** to `AND`
|
||||
2. Add multiple rules that ALL must pass:
|
||||
- Email must be from company domain
|
||||
- Email must be verified
|
||||
- User must be in specific group
|
||||
- Account must have 2FA enabled (if claim available)
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Configuration Interface Details</strong></summary>
|
||||
|
||||
### Provider Tabs
|
||||
|
||||
- Each configured provider appears as a tab at the top
|
||||
- Click a tab to switch between provider configurations
|
||||
- The **+** button on the right adds a new provider
|
||||
|
||||
### Authorization Mode Dropdown
|
||||
|
||||
- **simple**: Best for email-based authorization (recommended for most users)
|
||||
- **advanced**: For complex claim-based rules using JWT claims
|
||||
|
||||
### Simple Authorization Fields
|
||||
|
||||
When "simple" mode is selected, you'll see:
|
||||
|
||||
- **Allowed Email Domains**: Enter domains without @ (e.g., `company.com`)
|
||||
- Helper text: "Users with emails ending in these domains can login"
|
||||
- **Specific Email Addresses**: Add individual email addresses
|
||||
- Helper text: "Only these exact email addresses can login"
|
||||
- **Add Item** buttons to add multiple entries
|
||||
|
||||
### Advanced Authorization Fields
|
||||
|
||||
When "advanced" mode is selected, you'll see:
|
||||
|
||||
- **Authorization Rule Mode**: Choose `OR` (any rule passes) or `AND` (all rules must pass)
|
||||
- **Authorization Rules**: Add multiple claim-based rules
|
||||
- **For each rule**:
|
||||
- **Claim**: The JWT claim to check
|
||||
- **Operator**: How to compare (equals, contains, endsWith, startsWith)
|
||||
- **Value**: What to match against
|
||||
|
||||
### Additional Interface Elements
|
||||
|
||||
- **Enable Developer Sandbox**: Toggle to enable GraphQL sandbox at `/graphql`
|
||||
- The interface uses a dark theme for better visibility
|
||||
- Field validation indicators help ensure correct configuration
|
||||
|
||||
</details>
|
||||
|
||||
### Required Redirect URI
|
||||
|
||||
:::caution[Important Configuration]
|
||||
All providers must be configured with this exact redirect URI format:
|
||||
:::
|
||||
|
||||
```bash
|
||||
http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback
|
||||
```
|
||||
|
||||
:::tip
|
||||
Replace `YOUR_UNRAID_IP` with your actual server IP address (e.g., `192.168.1.100` or `tower.local`).
|
||||
:::
|
||||
|
||||
### Issuer URL Format
|
||||
|
||||
The **Issuer URL** field accepts both formats, but **base URL is strongly recommended** for security:
|
||||
|
||||
- **Base URL** (recommended): `https://accounts.google.com`
|
||||
- **Full discovery URL**: `https://accounts.google.com/.well-known/openid-configuration`
|
||||
|
||||
**⚠️ Security Note**: Always use the base URL format when possible. The system automatically appends `/.well-known/openid-configuration` for OIDC discovery. Using the full discovery URL directly disables important issuer validation checks and is not recommended by the OpenID Connect specification.
|
||||
|
||||
**Examples of correct base URLs:**
|
||||
- Google: `https://accounts.google.com`
|
||||
- Microsoft/Azure: `https://login.microsoftonline.com/YOUR_TENANT_ID/v2.0`
|
||||
- Keycloak: `https://keycloak.example.com/realms/YOUR_REALM`
|
||||
- Authelia: `https://auth.yourdomain.com`
|
||||
|
||||
## ✅ Testing Your Configuration
|
||||
|
||||

|
||||
*Unraid login page displaying both traditional username/password authentication and SSO options with customized provider buttons*
|
||||
|
||||
1. Save your provider configuration
|
||||
2. Log out (if logged in)
|
||||
3. Navigate to the login page
|
||||
4. Your configured provider button should appear
|
||||
5. Click to test the login flow
|
||||
|
||||
## 🔧 Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
#### "Provider not found" error
|
||||
|
||||
- Ensure the Issuer URL is correct
|
||||
- Check that the provider supports OIDC discovery (/.well-known/openid-configuration)
|
||||
|
||||
#### "Authorization failed"
|
||||
|
||||
- In Simple Mode: Check email domains are entered correctly (without @)
|
||||
- In Advanced Mode:
|
||||
- Verify claim names match exactly what your provider sends
|
||||
- Check if Authorization Rule Mode is set correctly (OR vs AND)
|
||||
- Ensure all required claims are present in the token
|
||||
- Enable debug logging to see actual claims and rule evaluation
|
||||
|
||||
#### "Invalid redirect URI"
|
||||
|
||||
- Ensure the redirect URI in your provider matches exactly
|
||||
- Include the correct port if using a non-standard configuration
|
||||
- Verify the redirect URI protocol matches your server's configuration (HTTP or HTTPS)
|
||||
|
||||
#### Cannot see login button
|
||||
|
||||
- Check that at least one authorization rule is configured
|
||||
- Verify the provider is enabled/saved
|
||||
|
||||
### Debug Mode
|
||||
|
||||
To troubleshoot issues:
|
||||
|
||||
1. Enable debug logging:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=debug unraid-api start --debug
|
||||
```
|
||||
|
||||
2. Check logs for:
|
||||
|
||||
- Received claims from provider
|
||||
- Authorization rule evaluation
|
||||
- Token validation errors
|
||||
|
||||
## 🔐 Security Best Practices
|
||||
|
||||
1. **Use Simple Mode for authorization** - Prevents overly permissive configurations and reduces misconfiguration risks
|
||||
2. **Be specific with authorization** - Don't use overly broad rules
|
||||
3. **Rotate secrets regularly** - Update client secrets periodically
|
||||
4. **Test thoroughly** - Verify only intended users can access
|
||||
|
||||
## 💡 Need Help?
|
||||
|
||||
- Check provider's OIDC documentation
|
||||
- Review Unraid API logs for detailed error messages
|
||||
- Ensure your provider supports standard OIDC discovery
|
||||
- Verify network connectivity between Unraid and provider
|
||||
|
||||
## 🏢 Provider-Specific Setup
|
||||
|
||||
### Unraid.net Provider
|
||||
|
||||
The Unraid.net provider is built-in and pre-configured. You only need to configure authorization rules in the interface.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: Pre-configured (built-in provider)
|
||||
- **Client ID/Secret**: Pre-configured (built-in provider)
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
:::tip[Redirect URI Protocol]
|
||||
**Match the protocol to your server setup:** Use `http://` if accessing your Unraid server without SSL/TLS (typical for local network access). Use `https://` if you've configured SSL/TLS on your server. Some OIDC providers (like Google) require HTTPS and won't accept HTTP redirect URIs.
|
||||
:::
|
||||
|
||||
Configure authorization rules using Simple Mode (allowed email domains/addresses) or Advanced Mode for complex requirements.
|
||||
|
||||
### Google
|
||||
|
||||
<details>
|
||||
<summary><strong>📋 Setup Steps</strong></summary>
|
||||
|
||||
Set up OAuth 2.0 credentials in [Google Cloud Console](https://console.cloud.google.com/):
|
||||
|
||||
1. Go to **APIs & Services** → **Credentials**
|
||||
2. Click **Create Credentials** → **OAuth client ID**
|
||||
3. Choose **Web application** as the application type
|
||||
4. Add your redirect URI to **Authorized redirect URIs**
|
||||
5. Configure the OAuth consent screen if prompted
|
||||
|
||||
</details>
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://accounts.google.com`
|
||||
- **Client ID/Secret**: From your OAuth 2.0 client credentials
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
:::warning[Google Domain Requirements]
|
||||
**Google requires valid domain names for OAuth redirect URIs.** Local IP addresses and `.local` domains are not accepted. To use Google OAuth with your Unraid server, you'll need:
|
||||
|
||||
- **Option 1: Reverse Proxy** - Set up a reverse proxy (like NGINX Proxy Manager or Traefik) with a valid domain name pointing to your Unraid API
|
||||
- **Option 2: Tailscale** - Use Tailscale to get a valid `*.ts.net` domain that Google will accept
|
||||
- **Option 3: Dynamic DNS** - Use a DDNS service to get a public domain name for your server
|
||||
|
||||
Remember to update your redirect URI in both Google Cloud Console and your Unraid OIDC configuration to use the valid domain.
|
||||
:::
|
||||
|
||||
For Google Workspace domains, use Advanced Mode with the `hd` claim to restrict access to your organization's domain.
|
||||
|
||||
### Authelia
|
||||
|
||||
Configure OIDC client in your Authelia `configuration.yml` with client ID `unraid-api` and generate a hashed secret using the Authelia hash-password command.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://auth.yourdomain.com`
|
||||
- **Client ID**: `unraid-api` (or as configured in Authelia)
|
||||
- **Client Secret**: The plaintext (unhashed) secret corresponding to the hashed value configured in Authelia
|
||||
- **Required Scopes**: `openid`, `profile`, `email`, `groups`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
Use Advanced Mode with `groups` claim for group-based authorization.
|
||||
|
||||
### Microsoft/Azure AD
|
||||
|
||||
Register a new app in [Azure Portal](https://portal.azure.com/) under Azure Active Directory → App registrations. Note the Application ID, create a client secret, and note your tenant ID.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://login.microsoftonline.com/YOUR_TENANT_ID/v2.0`
|
||||
- **Client ID**: Your Application (client) ID
|
||||
- **Client Secret**: Generated client secret
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
Authorization rules can be configured in the interface using email domains or advanced claims.
|
||||
|
||||
### Keycloak
|
||||
|
||||
Create a new confidential client in Keycloak Admin Console with `openid-connect` protocol and copy the client secret from the Credentials tab.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://keycloak.example.com/realms/YOUR_REALM`
|
||||
- **Client ID**: `unraid-api` (or as configured in Keycloak)
|
||||
- **Client Secret**: From Keycloak Credentials tab
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
For role-based authorization, use Advanced Mode with `realm_access.roles` or `resource_access` claims.
|
||||
|
||||
### Authentik
|
||||
|
||||
Create a new OAuth2/OpenID Provider in Authentik, then create an Application and link it to the provider.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://authentik.example.com/application/o/<application_slug>/`
|
||||
- **Client ID**: From Authentik provider configuration
|
||||
- **Client Secret**: From Authentik provider configuration
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
Authorization rules can be configured in the interface.
|
||||
|
||||
### Okta
|
||||
|
||||
Create a new OIDC Web Application in Okta Admin Console and assign appropriate users or groups.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://YOUR_DOMAIN.okta.com`
|
||||
- **Client ID**: From Okta application configuration
|
||||
- **Client Secret**: From Okta application configuration
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
Authorization rules can be configured in the interface using email domains or advanced claims.
|
||||
@@ -1,252 +0,0 @@
|
||||
---
|
||||
title: Programmatic API Key Management
|
||||
description: Create, use, and delete API keys programmatically for automated workflows
|
||||
sidebar_position: 4
|
||||
---
|
||||
|
||||
# Programmatic API Key Management
|
||||
|
||||
This guide explains how to create, use, and delete API keys programmatically using the Unraid API CLI, enabling automated workflows and scripts.
|
||||
|
||||
## Overview
|
||||
|
||||
The `unraid-api apikey` command supports both interactive and non-interactive modes, making it suitable for:
|
||||
|
||||
- Automated deployment scripts
|
||||
- CI/CD pipelines
|
||||
- Temporary access provisioning
|
||||
- Infrastructure as code workflows
|
||||
|
||||
:::tip[Quick Start]
|
||||
Jump to the [Complete Workflow Example](#complete-workflow-example) to see everything in action.
|
||||
:::
|
||||
|
||||
## Creating API Keys Programmatically
|
||||
|
||||
### Basic Creation with JSON Output
|
||||
|
||||
Use the `--json` flag to get machine-readable output:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create --name "workflow key" --roles ADMIN --json
|
||||
```
|
||||
|
||||
**Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"key": "your-generated-api-key-here",
|
||||
"name": "workflow key",
|
||||
"id": "generated-uuid"
|
||||
}
|
||||
```
|
||||
|
||||
### Advanced Creation with Permissions
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create \
|
||||
--name "limited access key" \
|
||||
--permissions "DOCKER:READ_ANY,ARRAY:READ_ANY" \
|
||||
--description "Read-only access for monitoring" \
|
||||
--json
|
||||
```
|
||||
|
||||
### Handling Existing Keys
|
||||
|
||||
If a key with the same name exists, use `--overwrite`:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create --name "existing key" --roles ADMIN --overwrite --json
|
||||
```
|
||||
|
||||
:::warning[Key Replacement]
|
||||
The `--overwrite` flag will permanently replace the existing key. The old key will be immediately invalidated.
|
||||
:::
|
||||
|
||||
## Deleting API Keys Programmatically
|
||||
|
||||
### Non-Interactive Deletion
|
||||
|
||||
Delete a key by name without prompts:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --delete --name "workflow key"
|
||||
```
|
||||
|
||||
**Output:**
|
||||
|
||||
```
|
||||
Successfully deleted 1 API key
|
||||
```
|
||||
|
||||
### JSON Output for Deletion
|
||||
|
||||
Use `--json` flag for machine-readable delete confirmation:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --delete --name "workflow key" --json
|
||||
```
|
||||
|
||||
**Success Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 1,
|
||||
"keys": [
|
||||
{
|
||||
"id": "generated-uuid",
|
||||
"name": "workflow key"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**Error Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 0,
|
||||
"error": "No API key found with name: nonexistent key"
|
||||
}
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
When the specified key doesn't exist:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --delete --name "nonexistent key"
|
||||
# Output: No API keys found to delete
|
||||
```
|
||||
|
||||
**JSON Error Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 0,
|
||||
"message": "No API keys found to delete"
|
||||
}
|
||||
```
|
||||
|
||||
## Complete Workflow Example
|
||||
|
||||
Here's a complete example for temporary access provisioning:
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# 1. Create temporary API key
|
||||
echo "Creating temporary API key..."
|
||||
KEY_DATA=$(unraid-api apikey --create \
|
||||
--name "temp deployment key" \
|
||||
--roles ADMIN \
|
||||
--description "Temporary key for deployment $(date)" \
|
||||
--json)
|
||||
|
||||
# 2. Extract the API key
|
||||
API_KEY=$(echo "$KEY_DATA" | jq -r '.key')
|
||||
echo "API key created successfully"
|
||||
|
||||
# 3. Use the key for operations
|
||||
echo "Configuring services..."
|
||||
curl -H "Authorization: Bearer $API_KEY" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"provider": "azure", "clientId": "your-client-id"}' \
|
||||
http://localhost:3001/graphql
|
||||
|
||||
# 4. Clean up. Note: a trap only covers failures that occur after it is
#    registered — with `set -e`, register it immediately after key creation
#    so cleanup also runs if an intermediate step fails.
trap 'echo "Cleaning up..."; unraid-api apikey --delete --name "temp deployment key"' EXIT
|
||||
|
||||
echo "Deployment completed successfully"
|
||||
```
|
||||
|
||||
## Command Reference
|
||||
|
||||
### Create Command Options
|
||||
|
||||
| Flag | Description | Example |
|
||||
| ----------------------- | ----------------------- | --------------------------------- |
|
||||
| `--name <name>` | Key name (required) | `--name "my key"` |
|
||||
| `--roles <roles>` | Comma-separated roles | `--roles ADMIN,VIEWER` |
|
||||
| `--permissions <perms>` | Resource:action pairs | `--permissions "DOCKER:READ_ANY"` |
|
||||
| `--description <desc>` | Key description | `--description "CI/CD key"` |
|
||||
| `--overwrite` | Replace existing key | `--overwrite` |
|
||||
| `--json` | Machine-readable output | `--json` |
|
||||
|
||||
### Available Roles
|
||||
|
||||
- `ADMIN` - Full system access
|
||||
- `CONNECT` - Unraid Connect features
|
||||
- `VIEWER` - Read-only access
|
||||
- `GUEST` - Limited access
|
||||
|
||||
### Available Resources and Actions
|
||||
|
||||
**Resources:** `ACTIVATION_CODE`, `API_KEY`, `ARRAY`, `CLOUD`, `CONFIG`, `CONNECT`, `CONNECT__REMOTE_ACCESS`, `CUSTOMIZATIONS`, `DASHBOARD`, `DISK`, `DISPLAY`, `DOCKER`, `FLASH`, `INFO`, `LOGS`, `ME`, `NETWORK`, `NOTIFICATIONS`, `ONLINE`, `OS`, `OWNER`, `PERMISSION`, `REGISTRATION`, `SERVERS`, `SERVICES`, `SHARE`, `VARS`, `VMS`, `WELCOME`
|
||||
|
||||
**Actions:** `CREATE_ANY`, `CREATE_OWN`, `READ_ANY`, `READ_OWN`, `UPDATE_ANY`, `UPDATE_OWN`, `DELETE_ANY`, `DELETE_OWN`
|
||||
|
||||
### Delete Command Options
|
||||
|
||||
| Flag | Description | Example |
|
||||
| --------------- | ------------------------ | ----------------- |
|
||||
| `--delete` | Enable delete mode | `--delete` |
|
||||
| `--name <name>` | Key to delete (optional) | `--name "my key"` |
|
||||
|
||||
**Note:** If `--name` is omitted, the command runs interactively.
|
||||
|
||||
## Best Practices
|
||||
|
||||
:::info[Security Best Practices]
|
||||
**Minimal Permissions**
|
||||
|
||||
- Use specific permissions instead of ADMIN role when possible
|
||||
- Example: `--permissions "DOCKER:READ_ANY"` instead of `--roles ADMIN`
|
||||
|
||||
**Key Lifecycle Management**
|
||||
|
||||
- Always clean up temporary keys after use
|
||||
- Store API keys securely (environment variables, secrets management)
|
||||
- Use descriptive names and descriptions for audit trails
|
||||
:::
|
||||
|
||||
### Error Handling
|
||||
|
||||
- Check exit codes (`$?`) after each command
|
||||
- Use `set -e` in bash scripts to fail fast
|
||||
- Implement proper cleanup with `trap`
|
||||
|
||||
### Key Naming
|
||||
|
||||
- Use descriptive names that include purpose and date
|
||||
- Names must contain only letters, numbers, and spaces
|
||||
- Unicode letters are supported
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
:::note[Common Error Messages]
|
||||
|
||||
**"API key name must contain only letters, numbers, and spaces"**
|
||||
|
||||
- **Solution:** Remove special characters like hyphens, underscores, or symbols
|
||||
|
||||
**"API key with name 'x' already exists"**
|
||||
|
||||
- **Solution:** Use `--overwrite` flag or choose a different name
|
||||
|
||||
**"Please add at least one role or permission to the key"**
|
||||
|
||||
- **Solution:** Specify either `--roles` or `--permissions` (or both)
|
||||
|
||||
:::
|
||||
|
||||
### Debug Mode
|
||||
|
||||
For troubleshooting, run with debug logging:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=debug unraid-api apikey --create --name "debug key" --roles ADMIN
|
||||
```
|
||||
@@ -1,172 +0,0 @@
|
||||
---
|
||||
title: Roadmap & Features
|
||||
description: Current status and upcoming features for the Unraid API
|
||||
sidebar_position: 10
|
||||
---
|
||||
|
||||
# Roadmap & Features
|
||||
|
||||
:::info Development Status
|
||||
This roadmap outlines completed and planned features for the Unraid API. Features and timelines may change based on development priorities and community feedback.
|
||||
:::
|
||||
|
||||
## Feature Status Legend
|
||||
|
||||
| Status | Description |
|
||||
|--------|-------------|
|
||||
| ✅ **Done** | Feature is complete and available |
|
||||
| 🚧 **In Progress** | Currently under active development |
|
||||
| 📅 **Planned** | Scheduled for future development |
|
||||
| 💡 **Under Consideration** | Being evaluated for future inclusion |
|
||||
|
||||
## Core Infrastructure
|
||||
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **API Development Environment Improvements** | v4.0.0 |
|
||||
| **Include API in Unraid OS** | Unraid v7.2-beta.1 |
|
||||
| **Separate API from Connect Plugin** | Unraid v7.2-beta.1 |
|
||||
|
||||
### Upcoming Features 📅
|
||||
|
||||
| Feature | Target Timeline |
|
||||
|---------|-----------------|
|
||||
| **Make API Open Source** | Q1 2025 |
|
||||
| **Developer Tools for Plugins** | Q2 2025 |
|
||||
|
||||
## Security & Authentication
|
||||
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **Permissions System Rewrite** | v4.0.0 |
|
||||
| **OIDC/SSO Support** | Unraid v7.2-beta.1 |
|
||||
|
||||
### In Development 🚧
|
||||
|
||||
- **User Interface Component Library** - Enhanced security components for the UI
|
||||
|
||||
## User Interface Improvements
|
||||
|
||||
### Planned Features 📅
|
||||
|
||||
| Feature | Target Timeline | Description |
|
||||
|---------|-----------------|-------------|
|
||||
| **New Settings Pages** | Q2 2025 | Modernized settings interface with improved UX |
|
||||
| **Custom Theme Creator** | Q2-Q3 2025 | Allow users to create and share custom themes |
|
||||
| **New Connect Settings Interface** | Q1 2025 | Redesigned Unraid Connect configuration |
|
||||
|
||||
## Array Management
|
||||
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **Array Status Monitoring** | v4.0.0 |
|
||||
|
||||
### Planned Features 📅
|
||||
|
||||
| Feature | Target Timeline | Description |
|
||||
|---------|-----------------|-------------|
|
||||
| **Storage Pool Creation Interface** | Q2 2025 | Simplified pool creation workflow |
|
||||
| **Storage Pool Status Interface** | Q2 2025 | Real-time pool health monitoring |
|
||||
|
||||
## Docker Integration
|
||||
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **Docker Container Status Monitoring** | v4.0.0 |
|
||||
|
||||
### Planned Features 📅
|
||||
|
||||
| Feature | Target Timeline | Description |
|
||||
|---------|-----------------|-------------|
|
||||
| **New Docker Status Interface Design** | Q3 2025 | Modern container management UI |
|
||||
| **New Docker Status Interface** | Q3 2025 | Implementation of new design |
|
||||
| **Docker Container Setup Interface** | Q3 2025 | Streamlined container deployment |
|
||||
| **Docker Compose Support** | TBD | Native docker-compose.yml support |
|
||||
|
||||
## Share Management
|
||||
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **Array/Cache Share Status Monitoring** | v4.0.0 |
|
||||
|
||||
### Under Consideration 💡
|
||||
|
||||
- **Storage Share Creation & Settings** - Enhanced share configuration options
|
||||
- **Storage Share Management Interface** - Unified share management dashboard
|
||||
|
||||
## Plugin System
|
||||
|
||||
### Planned Features 📅
|
||||
|
||||
| Feature | Target Timeline | Description |
|
||||
|---------|-----------------|-------------|
|
||||
| **New Plugins Interface** | Q3 2025 | Redesigned plugin management UI |
|
||||
| **Plugin Management Interface** | TBD | Advanced plugin configuration |
|
||||
| **Plugin Development Tools** | TBD | SDK and tooling for developers |
|
||||
|
||||
## Notifications
|
||||
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **Notifications System** | v4.0.0 |
|
||||
| **Notifications Interface** | v4.0.0 |
|
||||
|
||||
---
|
||||
|
||||
## Recent Releases
|
||||
|
||||
:::info Full Release History
|
||||
For a complete list of all releases, changelogs, and download links, visit the [Unraid API GitHub Releases](https://github.com/unraid/api/releases) page.
|
||||
:::
|
||||
|
||||
### Unraid v7.2-beta.1 Highlights
|
||||
|
||||
- 🎉 **API included in Unraid OS** - Native integration
|
||||
- 🔐 **OIDC/SSO Support** - Enterprise authentication
|
||||
- 📦 **Standalone API** - Separated from Connect plugin
|
||||
|
||||
### v4.0.0 Highlights
|
||||
|
||||
- 🛡️ **Permissions System Rewrite** - Enhanced security
|
||||
- 📊 **Comprehensive Monitoring** - Array, Docker, and Share status
|
||||
- 🔔 **Notifications System** - Real-time alerts and notifications
|
||||
- 🛠️ **Developer Environment** - Improved development tools
|
||||
|
||||
## Community Feedback
|
||||
|
||||
:::tip Have a Feature Request?
|
||||
We value community input! Please submit feature requests and feedback through:
|
||||
|
||||
- [Unraid Forums](https://forums.unraid.net)
|
||||
- [GitHub Issues](https://github.com/unraid/api/issues) - API is open source!
|
||||
|
||||
:::
|
||||
|
||||
## Version Support
|
||||
|
||||
| Unraid Version | API Version | Support Status |
|
||||
|----------------|-------------|----------------|
|
||||
| Unraid v7.2-beta.1+ | Latest | ✅ Active |
|
||||
| 7.0 - 7.1.x | v4.x via Plugin | ⚠️ Limited |
|
||||
| 6.12.x | v4.x via Plugin | ⚠️ Limited |
|
||||
| < 6.12 | Not Supported | ❌ EOL |
|
||||
|
||||
:::warning Legacy Support
|
||||
Versions prior to Unraid 7.2 require the API to be installed through the Unraid Connect plugin. Some features may not be available on older versions.
|
||||
:::
|
||||
|
||||
:::tip Pre-release Versions
|
||||
You can always install the Unraid Connect plugin to access pre-release versions of the API and get early access to new features before they're included in Unraid OS releases.
|
||||
:::
|
||||
@@ -1391,6 +1391,19 @@ type CpuLoad {
|
||||
percentSteal: Float!
|
||||
}
|
||||
|
||||
type CpuPackages implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""Total CPU package power draw (W)"""
|
||||
totalPower: Float!
|
||||
|
||||
"""Power draw per package (W)"""
|
||||
power: [Float!]!
|
||||
|
||||
"""Temperature per package (°C)"""
|
||||
temp: [Float!]!
|
||||
}
|
||||
|
||||
type CpuUtilization implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
@@ -1454,6 +1467,12 @@ type InfoCpu implements Node {
|
||||
|
||||
"""CPU feature flags"""
|
||||
flags: [String!]
|
||||
|
||||
"""
|
||||
Per-package array of core/thread pairs, e.g. [[[0,1],[2,3]], [[4,5],[6,7]]]
|
||||
"""
|
||||
topology: [[[Int!]!]!]!
|
||||
packages: CpuPackages!
|
||||
}
|
||||
|
||||
type MemoryLayout implements Node {
|
||||
@@ -2642,6 +2661,7 @@ type Subscription {
|
||||
arraySubscription: UnraidArray!
|
||||
logFile(path: String!): LogFileContent!
|
||||
systemMetricsCpu: CpuUtilization!
|
||||
systemMetricsCpuTelemetry: CpuPackages!
|
||||
systemMetricsMemory: MemoryUtilization!
|
||||
upsUpdates: UPSDevice!
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@unraid/api",
|
||||
"version": "4.22.2",
|
||||
"version": "4.27.2",
|
||||
"main": "src/cli/index.ts",
|
||||
"type": "module",
|
||||
"corepack": {
|
||||
@@ -30,6 +30,8 @@
|
||||
"// GraphQL Codegen": "",
|
||||
"codegen": "graphql-codegen --config codegen.ts",
|
||||
"codegen:watch": "graphql-codegen --config codegen.ts --watch",
|
||||
"// Internationalization": "",
|
||||
"i18n:extract": "node ./scripts/extract-translations.mjs",
|
||||
"// Code Quality": "",
|
||||
"lint": "eslint --config .eslintrc.ts src/",
|
||||
"lint:fix": "eslint --fix --config .eslintrc.ts src/",
|
||||
@@ -114,6 +116,7 @@
|
||||
"graphql-subscriptions": "3.0.0",
|
||||
"graphql-tag": "2.12.6",
|
||||
"graphql-ws": "6.0.6",
|
||||
"html-entities": "^2.6.0",
|
||||
"ini": "5.0.0",
|
||||
"ip": "2.0.1",
|
||||
"jose": "6.0.13",
|
||||
@@ -190,7 +193,7 @@
|
||||
"@types/stoppable": "1.1.3",
|
||||
"@types/strftime": "0.9.8",
|
||||
"@types/supertest": "6.0.3",
|
||||
"@types/uuid": "10.0.0",
|
||||
"@types/uuid": "11.0.0",
|
||||
"@types/ws": "8.18.1",
|
||||
"@types/wtfnode": "0.10.0",
|
||||
"@vitest/coverage-v8": "3.2.4",
|
||||
|
||||
162
api/scripts/extract-translations.mjs
Normal file
162
api/scripts/extract-translations.mjs
Normal file
@@ -0,0 +1,162 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { readFile, writeFile } from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
import { glob } from 'glob';
|
||||
import ts from 'typescript';
|
||||
|
||||
const projectRoot = process.cwd();
|
||||
const sourcePatterns = 'src/**/*.{ts,js}';
|
||||
const ignorePatterns = [
|
||||
'**/__tests__/**',
|
||||
'**/__test__/**',
|
||||
'**/*.spec.ts',
|
||||
'**/*.spec.js',
|
||||
'**/*.test.ts',
|
||||
'**/*.test.js',
|
||||
];
|
||||
|
||||
const englishLocaleFile = path.resolve(projectRoot, 'src/i18n/en.json');
|
||||
|
||||
const identifierTargets = new Set(['t', 'translate']);
|
||||
const propertyTargets = new Set([
|
||||
'i18n.t',
|
||||
'i18n.translate',
|
||||
'ctx.t',
|
||||
'this.translate',
|
||||
'this.i18n.translate',
|
||||
'this.i18n.t',
|
||||
]);
|
||||
|
||||
/**
 * Flattens a property-access expression (e.g. `this.i18n.t`) into a dotted
 * string. Returns undefined for any node shape that is not a plain
 * identifier / property-access chain.
 */
function getPropertyChain(node) {
    if (ts.isIdentifier(node)) {
        return node.text;
    }
    if (!ts.isPropertyAccessExpression(node)) {
        return undefined;
    }
    // Recurse into the receiver; a non-resolvable prefix poisons the chain.
    const prefix = getPropertyChain(node.expression);
    return prefix ? `${prefix}.${node.name.text}` : undefined;
}
|
||||
|
||||
/**
 * Returns the static text of a string literal or no-substitution template
 * literal, or undefined when the node's text cannot be known statically.
 */
function extractLiteral(node) {
    const isStaticString =
        ts.isStringLiteralLike(node) || ts.isNoSubstitutionTemplateLiteral(node);
    return isStaticString ? node.text : undefined;
}
|
||||
|
||||
/**
 * Walks a parsed source file and collects the first-argument string literals
 * of every translation call (`t(...)`, `translate(...)`, `i18n.t(...)`, etc.,
 * per identifierTargets / propertyTargets).
 *
 * @param sourceFile - A ts.SourceFile to scan.
 * @returns Set of discovered translation keys.
 */
function collectKeysFromSource(sourceFile) {
    const keys = new Set();

    // True when the callee matches one of the configured translation targets.
    const isTranslationCallee = (expr) => {
        if (ts.isIdentifier(expr)) {
            return identifierTargets.has(expr.text);
        }
        if (ts.isPropertyAccessExpression(expr)) {
            const chain = getPropertyChain(expr);
            return chain !== undefined && propertyTargets.has(chain);
        }
        return false;
    };

    const visit = (node) => {
        if (ts.isCallExpression(node) && isTranslationCallee(node.expression)) {
            const [firstArg] = node.arguments;
            // Only statically-known strings become keys; dynamic args are skipped.
            const literal = firstArg ? extractLiteral(firstArg) : undefined;
            if (literal) {
                keys.add(literal);
            }
        }
        ts.forEachChild(node, visit);
    };

    visit(sourceFile);
    return keys;
}
|
||||
|
||||
/**
 * Reads and parses the English locale catalog from disk.
 *
 * @returns The parsed catalog object; `{}` when the file is missing or empty.
 * @throws When the file contains anything other than a JSON object
 *         (including `null` or an array), or on any I/O error other than ENOENT.
 */
async function loadEnglishCatalog() {
    try {
        const raw = await readFile(englishLocaleFile, 'utf8');
        // An empty (or whitespace-only) file counts as an empty catalog.
        const parsed = raw.trim() ? JSON.parse(raw) : {};
        // Fix: JSON.parse('null') yields null, and typeof null === 'object',
        // so null previously slipped through and crashed Object.keys() later
        // with an opaque TypeError. Reject it explicitly alongside arrays.
        if (parsed === null || typeof parsed !== 'object' || Array.isArray(parsed)) {
            throw new Error('English locale file must contain a JSON object.');
        }
        return parsed;
    } catch (error) {
        // A missing catalog is not an error on first run — start from scratch.
        if (error && error.code === 'ENOENT') {
            return {};
        }
        throw error;
    }
}
|
||||
|
||||
/**
 * Merges newly discovered keys into the on-disk English catalog.
 * Existing translations are preserved, new keys default to themselves as the
 * English source string, and the catalog stays locale-sorted. The file is
 * rewritten only when its serialized form actually changes.
 *
 * @param keys - Iterable of translation keys discovered in the sources.
 * @returns Number of keys newly added to the catalog.
 */
async function ensureEnglishCatalog(keys) {
    const existingCatalog = await loadEnglishCatalog();

    const allKeys = [...new Set([...Object.keys(existingCatalog), ...keys])];
    allKeys.sort((a, b) => a.localeCompare(b));

    let added = 0;
    const nextCatalog = {};
    for (const key of allKeys) {
        const alreadyPresent = Object.prototype.hasOwnProperty.call(existingCatalog, key);
        if (!alreadyPresent) {
            added += 1;
        }
        // New keys fall back to the key itself as the English text.
        nextCatalog[key] = alreadyPresent ? existingCatalog[key] : key;
    }

    const nextJson = `${JSON.stringify(nextCatalog, null, 2)}\n`;
    const existingJson = `${JSON.stringify(existingCatalog, null, 2)}\n`;
    if (nextJson !== existingJson) {
        await writeFile(englishLocaleFile, nextJson, 'utf8');
    }

    return added;
}
|
||||
|
||||
/**
 * Entry point: scans every backend source file for translation calls and
 * syncs any newly discovered keys into the English catalog, logging the
 * outcome.
 */
async function main() {
    const files = await glob(sourcePatterns, {
        cwd: projectRoot,
        ignore: ignorePatterns,
        absolute: true,
    });

    const collectedKeys = new Set();
    const collectFromFile = async (file) => {
        const content = await readFile(file, 'utf8');
        // Standalone parse — no TypeScript program/checker is created.
        const sourceFile = ts.createSourceFile(file, content, ts.ScriptTarget.Latest, true);
        for (const key of collectKeysFromSource(sourceFile)) {
            collectedKeys.add(key);
        }
    };
    // Files are independent, so read and scan them concurrently.
    await Promise.all(files.map(collectFromFile));

    const added = await ensureEnglishCatalog(collectedKeys);
    console.log(
        added === 0
            ? '[i18n] No new backend translation keys detected.'
            : `[i18n] Added ${added} key(s) to src/i18n/en.json.`
    );
}
|
||||
|
||||
// Top-level failure handler: signal failure via the exit code (so the process
// still flushes and exits cleanly) and report the original error with context.
main().catch((error) => {
    process.exitCode = 1;
    console.error('[i18n] Failed to extract backend translations.', error);
});
|
||||
@@ -4,23 +4,18 @@ import {
|
||||
getBannerPathIfPresent,
|
||||
getCasePathIfPresent,
|
||||
} from '@app/core/utils/images/image-file-helpers.js';
|
||||
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { loadDynamixConfig } from '@app/store/index.js';
|
||||
|
||||
test('get case path returns expected result', async () => {
|
||||
await expect(getCasePathIfPresent()).resolves.toContain('/dev/dynamix/case-model.png');
|
||||
});
|
||||
|
||||
test('get banner path returns null (state unloaded)', async () => {
|
||||
await expect(getBannerPathIfPresent()).resolves.toMatchInlineSnapshot('null');
|
||||
});
|
||||
|
||||
test('get banner path returns the banner (state loaded)', async () => {
|
||||
await store.dispatch(loadDynamixConfigFile()).unwrap();
|
||||
loadDynamixConfig();
|
||||
await expect(getBannerPathIfPresent()).resolves.toContain('/dev/dynamix/banner.png');
|
||||
});
|
||||
|
||||
test('get banner path returns null when no banner (state loaded)', async () => {
|
||||
await store.dispatch(loadDynamixConfigFile()).unwrap();
|
||||
loadDynamixConfig();
|
||||
await expect(getBannerPathIfPresent('notabanner.png')).resolves.toMatchInlineSnapshot('null');
|
||||
});
|
||||
|
||||
178
api/src/__test__/core/utils/parsers/ini-boolean-parser.test.ts
Normal file
178
api/src/__test__/core/utils/parsers/ini-boolean-parser.test.ts
Normal file
@@ -0,0 +1,178 @@
|
||||
import { describe, expect, test } from 'vitest';
|
||||
|
||||
import {
|
||||
iniBooleanOrAutoToJsBoolean,
|
||||
iniBooleanToJsBoolean,
|
||||
} from '@app/core/utils/parsers/ini-boolean-parser.js';
|
||||
|
||||
// Suite for iniBooleanToJsBoolean: canonical "yes"/"no"/"true"/"false" values,
// malformed inputs (non-alphabetic characters appear to be stripped before
// matching — confirm against the parser), the optional defaultValue argument,
// and the undefined fallback when no default is given.
describe('iniBooleanToJsBoolean', () => {
    describe('valid boolean values', () => {
        test('returns false for "no"', () => {
            expect(iniBooleanToJsBoolean('no')).toBe(false);
        });

        test('returns false for "false"', () => {
            expect(iniBooleanToJsBoolean('false')).toBe(false);
        });

        test('returns true for "yes"', () => {
            expect(iniBooleanToJsBoolean('yes')).toBe(true);
        });

        test('returns true for "true"', () => {
            expect(iniBooleanToJsBoolean('true')).toBe(true);
        });
    });

    // Values with trailing junk (e.g. "no*") still resolve; values whose
    // cleaned form is not a recognized word resolve to undefined.
    describe('malformed values', () => {
        test('handles "no*" as false', () => {
            expect(iniBooleanToJsBoolean('no*')).toBe(false);
        });

        test('handles "yes*" as true', () => {
            expect(iniBooleanToJsBoolean('yes*')).toBe(true);
        });

        test('handles "true*" as true', () => {
            expect(iniBooleanToJsBoolean('true*')).toBe(true);
        });

        test('handles "false*" as false', () => {
            expect(iniBooleanToJsBoolean('false*')).toBe(false);
        });

        test('returns undefined for "n0!" (cleans to "n" which is invalid)', () => {
            expect(iniBooleanToJsBoolean('n0!')).toBe(undefined);
        });

        test('returns undefined for "y3s!" (cleans to "ys" which is invalid)', () => {
            expect(iniBooleanToJsBoolean('y3s!')).toBe(undefined);
        });

        test('handles mixed case with extra chars "YES*" as true', () => {
            expect(iniBooleanToJsBoolean('YES*')).toBe(true);
        });

        test('handles mixed case with extra chars "NO*" as false', () => {
            expect(iniBooleanToJsBoolean('NO*')).toBe(false);
        });
    });

    // The second argument is returned verbatim whenever parsing fails.
    describe('default values', () => {
        test('returns default value for invalid input when provided', () => {
            expect(iniBooleanToJsBoolean('invalid', true)).toBe(true);
            expect(iniBooleanToJsBoolean('invalid', false)).toBe(false);
        });

        test('returns default value for empty string when provided', () => {
            expect(iniBooleanToJsBoolean('', true)).toBe(true);
            expect(iniBooleanToJsBoolean('', false)).toBe(false);
        });
    });

    // Without a default, unparseable input yields undefined rather than throwing.
    describe('undefined fallback cases', () => {
        test('returns undefined for invalid input without default', () => {
            expect(iniBooleanToJsBoolean('invalid')).toBe(undefined);
        });

        test('returns undefined for empty string without default', () => {
            expect(iniBooleanToJsBoolean('')).toBe(undefined);
        });

        test('returns undefined for numeric string without default', () => {
            expect(iniBooleanToJsBoolean('123')).toBe(undefined);
        });
    });
});
|
||||
|
||||
// Suite for iniBooleanOrAutoToJsBoolean: same boolean parsing as
// iniBooleanToJsBoolean plus the tri-state "auto" value, which maps to null.
// Also covers malformed/non-string inputs, which fall back to undefined.
describe('iniBooleanOrAutoToJsBoolean', () => {
    describe('valid boolean values', () => {
        test('returns false for "no"', () => {
            expect(iniBooleanOrAutoToJsBoolean('no')).toBe(false);
        });

        test('returns false for "false"', () => {
            expect(iniBooleanOrAutoToJsBoolean('false')).toBe(false);
        });

        test('returns true for "yes"', () => {
            expect(iniBooleanOrAutoToJsBoolean('yes')).toBe(true);
        });

        test('returns true for "true"', () => {
            expect(iniBooleanOrAutoToJsBoolean('true')).toBe(true);
        });
    });

    // "auto" is a distinct third state, represented as null (not undefined).
    describe('auto value', () => {
        test('returns null for "auto"', () => {
            expect(iniBooleanOrAutoToJsBoolean('auto')).toBe(null);
        });
    });

    describe('malformed values', () => {
        test('handles "no*" as false', () => {
            expect(iniBooleanOrAutoToJsBoolean('no*')).toBe(false);
        });

        test('handles "yes*" as true', () => {
            expect(iniBooleanOrAutoToJsBoolean('yes*')).toBe(true);
        });

        test('handles "auto*" as null', () => {
            expect(iniBooleanOrAutoToJsBoolean('auto*')).toBe(null);
        });

        test('handles "true*" as true', () => {
            expect(iniBooleanOrAutoToJsBoolean('true*')).toBe(true);
        });

        test('handles "false*" as false', () => {
            expect(iniBooleanOrAutoToJsBoolean('false*')).toBe(false);
        });

        test('handles "n0!" as undefined fallback (cleans to "n" which is invalid)', () => {
            expect(iniBooleanOrAutoToJsBoolean('n0!')).toBe(undefined);
        });

        test('handles "a1ut2o!" as null (removes non-alphabetic chars)', () => {
            expect(iniBooleanOrAutoToJsBoolean('a1ut2o!')).toBe(null);
        });

        test('handles mixed case "AUTO*" as null', () => {
            expect(iniBooleanOrAutoToJsBoolean('AUTO*')).toBe(null);
        });
    });

    // Anything that cleans to an unrecognized word yields undefined.
    describe('fallback behavior', () => {
        test('returns undefined for completely invalid input', () => {
            expect(iniBooleanOrAutoToJsBoolean('invalid123')).toBe(undefined);
        });

        test('returns undefined for empty string', () => {
            expect(iniBooleanOrAutoToJsBoolean('')).toBe(undefined);
        });

        test('returns undefined for numeric string', () => {
            expect(iniBooleanOrAutoToJsBoolean('123')).toBe(undefined);
        });

        test('returns undefined for special characters only', () => {
            expect(iniBooleanOrAutoToJsBoolean('!@#$')).toBe(undefined);
        });
    });

    // Non-string inputs (bypassing the type system via `as any`) must not throw.
    describe('edge cases', () => {
        test('handles undefined gracefully', () => {
            expect(iniBooleanOrAutoToJsBoolean(undefined as any)).toBe(undefined);
        });

        test('handles null gracefully', () => {
            expect(iniBooleanOrAutoToJsBoolean(null as any)).toBe(undefined);
        });

        test('handles non-string input gracefully', () => {
            expect(iniBooleanOrAutoToJsBoolean(123 as any)).toBe(undefined);
        });
    });
});
|
||||
12
api/src/connect-plugin-cleanup.ts
Normal file
12
api/src/connect-plugin-cleanup.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import { existsSync } from 'node:fs';
|
||||
|
||||
/**
|
||||
* Local filesystem and env checks stay synchronous so we can branch at module load.
|
||||
* @returns True if the Connect Unraid plugin is installed, false otherwise.
|
||||
*/
|
||||
export const isConnectPluginInstalled = () => {
|
||||
if (process.env.SKIP_CONNECT_PLUGIN_CHECK === 'true') {
|
||||
return true;
|
||||
}
|
||||
return existsSync('/boot/config/plugins/dynamix.unraid.net.plg');
|
||||
};
|
||||
66
api/src/core/utils/__test__/safe-mode.test.ts
Normal file
66
api/src/core/utils/__test__/safe-mode.test.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { isSafeModeEnabled } from '@app/core/utils/safe-mode.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import * as stateFileLoader from '@app/store/services/state-file-loader.js';
|
||||
|
||||
// Suite for isSafeModeEnabled: verifies the three resolution paths —
// (1) the flag is already in the Redux store, (2) the store lacks it and the
// synchronous state-file loader supplies it, (3) neither source has it and the
// function defaults to false. Store/loader access is stubbed with vi.spyOn.
describe('isSafeModeEnabled', () => {
    afterEach(() => {
        // Undo every spy so each test sees the real store/loader again.
        vi.restoreAllMocks();
    });

    it('returns the safe mode flag already present in the store', () => {
        // Clone current state with emhttp.var.safeMode forced to true.
        const baseState = store.getState();
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            emhttp: {
                ...baseState.emhttp,
                var: {
                    ...(baseState.emhttp?.var ?? {}),
                    safeMode: true,
                },
            },
        });
        const loaderSpy = vi.spyOn(stateFileLoader, 'loadStateFileSync');

        expect(isSafeModeEnabled()).toBe(true);
        // Store already had the answer, so the disk loader must not be hit.
        expect(loaderSpy).not.toHaveBeenCalled();
    });

    it('falls back to the synchronous loader when store state is missing', () => {
        // Store state has safeMode stripped (cast through unknown to simulate
        // a partially-loaded store), so the loader result should win.
        const baseState = store.getState();
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            emhttp: {
                ...baseState.emhttp,
                var: {
                    ...(baseState.emhttp?.var ?? {}),
                    safeMode: undefined as unknown as boolean,
                } as typeof baseState.emhttp.var,
            } as typeof baseState.emhttp,
        } as typeof baseState);
        vi.spyOn(stateFileLoader, 'loadStateFileSync').mockReturnValue({
            ...(baseState.emhttp?.var ?? {}),
            safeMode: true,
        } as any);

        expect(isSafeModeEnabled()).toBe(true);
    });

    it('defaults to false when loader cannot provide state', () => {
        // Neither the store nor the loader can answer → safe default is false.
        const baseState = store.getState();
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            emhttp: {
                ...baseState.emhttp,
                var: {
                    ...(baseState.emhttp?.var ?? {}),
                    safeMode: undefined as unknown as boolean,
                } as typeof baseState.emhttp.var,
            } as typeof baseState.emhttp,
        } as typeof baseState);
        vi.spyOn(stateFileLoader, 'loadStateFileSync').mockReturnValue(null);

        expect(isSafeModeEnabled()).toBe(false);
    });
});
|
||||
86
api/src/core/utils/parsers/ini-boolean-parser.ts
Normal file
86
api/src/core/utils/parsers/ini-boolean-parser.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
import { type IniStringBoolean, type IniStringBooleanOrAuto } from '@app/core/types/ini.js';
|
||||
|
||||
/**
|
||||
* Converts INI boolean string values to JavaScript boolean values.
|
||||
* Handles malformed values by cleaning them of non-alphabetic characters.
|
||||
*
|
||||
* @param value - The string value to parse ("yes", "no", "true", "false", etc.)
|
||||
* @returns boolean value or undefined if parsing fails
|
||||
*/
|
||||
export function iniBooleanToJsBoolean(value: string): boolean | undefined;
|
||||
/**
|
||||
* Converts INI boolean string values to JavaScript boolean values.
|
||||
* Handles malformed values by cleaning them of non-alphabetic characters.
|
||||
*
|
||||
* @param value - The string value to parse ("yes", "no", "true", "false", etc.)
|
||||
* @param defaultValue - Default value to return if parsing fails
|
||||
* @returns boolean value or defaultValue if parsing fails (never undefined when defaultValue is provided)
|
||||
*/
|
||||
export function iniBooleanToJsBoolean(value: string, defaultValue: boolean): boolean;
|
||||
export function iniBooleanToJsBoolean(value: string, defaultValue?: boolean): boolean | undefined {
|
||||
if (value === 'no' || value === 'false') {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (value === 'yes' || value === 'true') {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Handle malformed values by cleaning them first
|
||||
if (typeof value === 'string') {
|
||||
const cleanValue = value.replace(/[^a-zA-Z]/g, '').toLowerCase();
|
||||
if (cleanValue === 'no' || cleanValue === 'false') {
|
||||
return false;
|
||||
}
|
||||
if (cleanValue === 'yes' || cleanValue === 'true') {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// Always return defaultValue when provided (even if undefined)
|
||||
if (arguments.length >= 2) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
// Return undefined only when no default was provided
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts INI boolean or auto string values to JavaScript boolean or null values.
|
||||
* Handles malformed values by cleaning them of non-alphabetic characters.
|
||||
*
|
||||
* @param value - The string value to parse ("yes", "no", "auto", "true", "false", etc.)
|
||||
* @returns boolean value for yes/no/true/false, null for auto, or undefined as fallback
|
||||
*/
|
||||
export const iniBooleanOrAutoToJsBoolean = (
|
||||
value: IniStringBooleanOrAuto | string
|
||||
): boolean | null | undefined => {
|
||||
// Handle auto first
|
||||
if (value === 'auto') {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Try to parse as boolean
|
||||
const boolResult = iniBooleanToJsBoolean(value as IniStringBoolean);
|
||||
if (boolResult !== undefined) {
|
||||
return boolResult;
|
||||
}
|
||||
|
||||
// Handle malformed values like "auto*" by extracting the base value
|
||||
if (typeof value === 'string') {
|
||||
const cleanValue = value.replace(/[^a-zA-Z]/g, '').toLowerCase();
|
||||
if (cleanValue === 'auto') {
|
||||
return null;
|
||||
}
|
||||
if (cleanValue === 'no' || cleanValue === 'false') {
|
||||
return false;
|
||||
}
|
||||
if (cleanValue === 'yes' || cleanValue === 'true') {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// Return undefined as fallback instead of throwing to prevent API crash
|
||||
return undefined;
|
||||
};
|
||||
17
api/src/core/utils/safe-mode.ts
Normal file
17
api/src/core/utils/safe-mode.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { store } from '@app/store/index.js';
|
||||
import { loadStateFileSync } from '@app/store/services/state-file-loader.js';
|
||||
import { StateFileKey } from '@app/store/types.js';
|
||||
|
||||
export const isSafeModeEnabled = (): boolean => {
|
||||
const safeModeFromStore = store.getState().emhttp?.var?.safeMode;
|
||||
if (typeof safeModeFromStore === 'boolean') {
|
||||
return safeModeFromStore;
|
||||
}
|
||||
|
||||
const varState = loadStateFileSync(StateFileKey.var);
|
||||
if (varState) {
|
||||
return Boolean(varState.safeMode);
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
1
api/src/i18n/ar.json
Normal file
1
api/src/i18n/ar.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/bn.json
Normal file
1
api/src/i18n/bn.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/ca.json
Normal file
1
api/src/i18n/ca.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/cs.json
Normal file
1
api/src/i18n/cs.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/da.json
Normal file
1
api/src/i18n/da.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/de.json
Normal file
1
api/src/i18n/de.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/en.json
Normal file
1
api/src/i18n/en.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/es.json
Normal file
1
api/src/i18n/es.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/fr.json
Normal file
1
api/src/i18n/fr.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/hi.json
Normal file
1
api/src/i18n/hi.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/hr.json
Normal file
1
api/src/i18n/hr.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/hu.json
Normal file
1
api/src/i18n/hu.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/it.json
Normal file
1
api/src/i18n/it.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/ja.json
Normal file
1
api/src/i18n/ja.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/ko.json
Normal file
1
api/src/i18n/ko.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/lv.json
Normal file
1
api/src/i18n/lv.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/nl.json
Normal file
1
api/src/i18n/nl.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/no.json
Normal file
1
api/src/i18n/no.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/pl.json
Normal file
1
api/src/i18n/pl.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/pt.json
Normal file
1
api/src/i18n/pt.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/ro.json
Normal file
1
api/src/i18n/ro.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/ru.json
Normal file
1
api/src/i18n/ru.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/sv.json
Normal file
1
api/src/i18n/sv.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/uk.json
Normal file
1
api/src/i18n/uk.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/zh.json
Normal file
1
api/src/i18n/zh.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
@@ -4,7 +4,7 @@ import '@app/dotenv.js';
|
||||
|
||||
import { type NestFastifyApplication } from '@nestjs/platform-fastify';
|
||||
import { unlinkSync } from 'fs';
|
||||
import { mkdir } from 'fs/promises';
|
||||
import { mkdir, readFile } from 'fs/promises';
|
||||
import http from 'http';
|
||||
import https from 'https';
|
||||
|
||||
@@ -18,13 +18,11 @@ import { fileExistsSync } from '@app/core/utils/files/file-exists.js';
|
||||
import { getServerIdentifier } from '@app/core/utils/server-identifier.js';
|
||||
import { environment, PATHS_CONFIG_MODULES, PORT } from '@app/environment.js';
|
||||
import * as envVars from '@app/environment.js';
|
||||
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
|
||||
import { shutdownApiEvent } from '@app/store/actions/shutdown-api-event.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { loadDynamixConfig, store } from '@app/store/index.js';
|
||||
import { startMiddlewareListeners } from '@app/store/listeners/listener-middleware.js';
|
||||
import { loadStateFiles } from '@app/store/modules/emhttp.js';
|
||||
import { loadRegistrationKey } from '@app/store/modules/registration.js';
|
||||
import { setupDynamixConfigWatch } from '@app/store/watch/dynamix-config-watch.js';
|
||||
import { setupRegistrationKeyWatch } from '@app/store/watch/registration-watch.js';
|
||||
import { StateManager } from '@app/store/watch/state-watch.js';
|
||||
|
||||
@@ -76,7 +74,7 @@ export const viteNodeApp = async () => {
|
||||
await store.dispatch(loadRegistrationKey());
|
||||
|
||||
// Load my dynamix config file into store
|
||||
await store.dispatch(loadDynamixConfigFile());
|
||||
loadDynamixConfig();
|
||||
|
||||
// Start listening to file updates
|
||||
StateManager.getInstance();
|
||||
@@ -84,9 +82,6 @@ export const viteNodeApp = async () => {
|
||||
// Start listening to key file changes
|
||||
setupRegistrationKeyWatch();
|
||||
|
||||
// Start listening to dynamix config file changes
|
||||
setupDynamixConfigWatch();
|
||||
|
||||
// If port is unix socket, delete old socket before starting http server
|
||||
unlinkUnixPort();
|
||||
|
||||
|
||||
@@ -1,12 +1,9 @@
|
||||
import { F_OK } from 'constants';
|
||||
import { access } from 'fs/promises';
|
||||
|
||||
import { createAsyncThunk } from '@reduxjs/toolkit';
|
||||
import { createTtlMemoizedLoader } from '@unraid/shared';
|
||||
|
||||
import type { RecursivePartial } from '@app/types/index.js';
|
||||
import { type DynamixConfig } from '@app/core/types/ini.js';
|
||||
import { fileExistsSync } from '@app/core/utils/files/file-exists.js';
|
||||
import { parseConfig } from '@app/core/utils/misc/parse-config.js';
|
||||
import { type RecursiveNullable, type RecursivePartial } from '@app/types/index.js';
|
||||
import { batchProcess } from '@app/utils.js';
|
||||
|
||||
/**
|
||||
* Loads a configuration file from disk, parses it to a RecursivePartial of the provided type, and returns it.
|
||||
@@ -16,11 +13,8 @@ import { batchProcess } from '@app/utils.js';
|
||||
* @param path The path to the configuration file on disk.
|
||||
* @returns A parsed RecursivePartial of the provided type.
|
||||
*/
|
||||
async function loadConfigFile<ConfigType>(path: string): Promise<RecursivePartial<ConfigType>> {
|
||||
const fileIsAccessible = await access(path, F_OK)
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
return fileIsAccessible
|
||||
function loadConfigFileSync<ConfigType>(path: string): RecursivePartial<ConfigType> {
|
||||
return fileExistsSync(path)
|
||||
? parseConfig<RecursivePartial<ConfigType>>({
|
||||
filePath: path,
|
||||
type: 'ini',
|
||||
@@ -28,21 +22,40 @@ async function loadConfigFile<ConfigType>(path: string): Promise<RecursivePartia
|
||||
: {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Load the dynamix.cfg into the store.
|
||||
*
|
||||
* Note: If the file doesn't exist this will fallback to default values.
|
||||
*/
|
||||
export const loadDynamixConfigFile = createAsyncThunk<
|
||||
RecursiveNullable<RecursivePartial<DynamixConfig>>,
|
||||
string | undefined
|
||||
>('config/load-dynamix-config-file', async (filePath) => {
|
||||
if (filePath) {
|
||||
return loadConfigFile<DynamixConfig>(filePath);
|
||||
}
|
||||
const store = await import('@app/store/index.js');
|
||||
const paths = store.getters.paths()['dynamix-config'];
|
||||
const { data: configs } = await batchProcess(paths, (path) => loadConfigFile<DynamixConfig>(path));
|
||||
const [defaultConfig = {}, customConfig = {}] = configs;
|
||||
return { ...defaultConfig, ...customConfig };
|
||||
type ConfigPaths = readonly (string | undefined | null)[];
|
||||
const CACHE_WINDOW_MS = 250;
|
||||
|
||||
const memoizedConfigLoader = createTtlMemoizedLoader<
|
||||
RecursivePartial<DynamixConfig>,
|
||||
ConfigPaths,
|
||||
string
|
||||
>({
|
||||
ttlMs: CACHE_WINDOW_MS,
|
||||
getCacheKey: (configPaths: ConfigPaths): string => JSON.stringify(configPaths),
|
||||
load: (configPaths: ConfigPaths) => {
|
||||
const validPaths = configPaths.filter((path): path is string => Boolean(path));
|
||||
if (validPaths.length === 0) {
|
||||
return {};
|
||||
}
|
||||
const configFiles = validPaths.map((path) => loadConfigFileSync<DynamixConfig>(path));
|
||||
return configFiles.reduce<RecursivePartial<DynamixConfig>>(
|
||||
(accumulator, configFile) => ({
|
||||
...accumulator,
|
||||
...configFile,
|
||||
}),
|
||||
{}
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
/**
|
||||
* Loads dynamix config from disk with TTL caching.
|
||||
*
|
||||
* @param configPaths - Array of config file paths to load and merge
|
||||
* @returns Merged config object from all valid paths
|
||||
*/
|
||||
export const loadDynamixConfigFromDiskSync = (
|
||||
configPaths: readonly (string | undefined | null)[]
|
||||
): RecursivePartial<DynamixConfig> => {
|
||||
return memoizedConfigLoader.get(configPaths);
|
||||
};
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
import { configureStore } from '@reduxjs/toolkit';
|
||||
|
||||
import { logger } from '@app/core/log.js';
|
||||
import { loadDynamixConfigFromDiskSync } from '@app/store/actions/load-dynamix-config-file.js';
|
||||
import { listenerMiddleware } from '@app/store/listeners/listener-middleware.js';
|
||||
import { updateDynamixConfig } from '@app/store/modules/dynamix.js';
|
||||
import { rootReducer } from '@app/store/root-reducer.js';
|
||||
import { FileLoadStatus } from '@app/store/types.js';
|
||||
|
||||
export const store = configureStore({
|
||||
reducer: rootReducer,
|
||||
@@ -15,8 +19,36 @@ export type RootState = ReturnType<typeof store.getState>;
|
||||
export type AppDispatch = typeof store.dispatch;
|
||||
export type ApiStore = typeof store;
|
||||
|
||||
// loadDynamixConfig is located here and not in the actions/load-dynamix-config-file.js file because it needs to access the store,
|
||||
// and injecting it seemed circular and convoluted for this use case.
|
||||
/**
|
||||
* Loads the dynamix config into the store.
|
||||
* Can be called multiple times - uses TTL caching internally.
|
||||
* @returns The loaded dynamix config.
|
||||
*/
|
||||
export const loadDynamixConfig = () => {
|
||||
const configPaths = store.getState().paths['dynamix-config'] ?? [];
|
||||
try {
|
||||
const config = loadDynamixConfigFromDiskSync(configPaths);
|
||||
store.dispatch(
|
||||
updateDynamixConfig({
|
||||
...config,
|
||||
status: FileLoadStatus.LOADED,
|
||||
})
|
||||
);
|
||||
} catch (error) {
|
||||
logger.error(error, 'Failed to load dynamix config from disk');
|
||||
store.dispatch(
|
||||
updateDynamixConfig({
|
||||
status: FileLoadStatus.FAILED_LOADING,
|
||||
})
|
||||
);
|
||||
}
|
||||
return store.getState().dynamix;
|
||||
};
|
||||
|
||||
export const getters = {
|
||||
dynamix: () => store.getState().dynamix,
|
||||
dynamix: () => loadDynamixConfig(),
|
||||
emhttp: () => store.getState().emhttp,
|
||||
paths: () => store.getState().paths,
|
||||
registration: () => store.getState().registration,
|
||||
|
||||
@@ -2,7 +2,6 @@ import type { PayloadAction } from '@reduxjs/toolkit';
|
||||
import { createSlice } from '@reduxjs/toolkit';
|
||||
|
||||
import { type DynamixConfig } from '@app/core/types/ini.js';
|
||||
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
|
||||
import { FileLoadStatus } from '@app/store/types.js';
|
||||
import { RecursivePartial } from '@app/types/index.js';
|
||||
|
||||
@@ -22,24 +21,6 @@ export const dynamix = createSlice({
|
||||
return Object.assign(state, action.payload);
|
||||
},
|
||||
},
|
||||
extraReducers(builder) {
|
||||
builder.addCase(loadDynamixConfigFile.pending, (state) => {
|
||||
state.status = FileLoadStatus.LOADING;
|
||||
});
|
||||
|
||||
builder.addCase(loadDynamixConfigFile.fulfilled, (state, action) => {
|
||||
return {
|
||||
...(action.payload as DynamixConfig),
|
||||
status: FileLoadStatus.LOADED,
|
||||
};
|
||||
});
|
||||
|
||||
builder.addCase(loadDynamixConfigFile.rejected, (state, action) => {
|
||||
Object.assign(state, action.payload, {
|
||||
status: FileLoadStatus.FAILED_LOADING,
|
||||
});
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
export const { updateDynamixConfig } = dynamix.actions;
|
||||
|
||||
@@ -163,6 +163,18 @@ export const loadStateFiles = createAsyncThunk<
|
||||
return state;
|
||||
});
|
||||
|
||||
const stateFieldKeyMap: Record<StateFileKey, keyof SliceState> = {
|
||||
[StateFileKey.var]: 'var',
|
||||
[StateFileKey.devs]: 'devices',
|
||||
[StateFileKey.network]: 'networks',
|
||||
[StateFileKey.nginx]: 'nginx',
|
||||
[StateFileKey.shares]: 'shares',
|
||||
[StateFileKey.disks]: 'disks',
|
||||
[StateFileKey.users]: 'users',
|
||||
[StateFileKey.sec]: 'smbShares',
|
||||
[StateFileKey.sec_nfs]: 'nfsShares',
|
||||
};
|
||||
|
||||
export const emhttp = createSlice({
|
||||
name: 'emhttp',
|
||||
initialState,
|
||||
@@ -175,7 +187,8 @@ export const emhttp = createSlice({
|
||||
}>
|
||||
) {
|
||||
const { field } = action.payload;
|
||||
return Object.assign(state, { [field]: action.payload.state });
|
||||
const targetField = stateFieldKeyMap[field] ?? (field as keyof SliceState);
|
||||
return Object.assign(state, { [targetField]: action.payload.state });
|
||||
},
|
||||
},
|
||||
extraReducers(builder) {
|
||||
|
||||
81
api/src/store/services/__test__/state-file-loader.test.ts
Normal file
81
api/src/store/services/__test__/state-file-loader.test.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
import { mkdtempSync, readFileSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { store } from '@app/store/index.js';
|
||||
import { loadStateFileSync } from '@app/store/services/state-file-loader.js';
|
||||
import { StateFileKey } from '@app/store/types.js';
|
||||
|
||||
const VAR_FIXTURE = readFileSync(new URL('../../../../dev/states/var.ini', import.meta.url), 'utf-8');
|
||||
|
||||
const writeVarFixture = (dir: string, safeMode: 'yes' | 'no') => {
|
||||
const content = VAR_FIXTURE.replace(/safeMode="(yes|no)"/, `safeMode="${safeMode}"`);
|
||||
writeFileSync(join(dir, `${StateFileKey.var}.ini`), content);
|
||||
};
|
||||
|
||||
describe('loadStateFileSync', () => {
|
||||
let tempDir: string;
|
||||
let baseState: ReturnType<typeof store.getState>;
|
||||
|
||||
beforeEach(() => {
|
||||
tempDir = mkdtempSync(join(tmpdir(), 'state-file-'));
|
||||
baseState = store.getState();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
rmSync(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('loads var.ini, updates the store, and returns the parsed state', () => {
|
||||
writeVarFixture(tempDir, 'yes');
|
||||
vi.spyOn(store, 'getState').mockReturnValue({
|
||||
...baseState,
|
||||
paths: {
|
||||
...baseState.paths,
|
||||
states: tempDir,
|
||||
},
|
||||
});
|
||||
const dispatchSpy = vi.spyOn(store, 'dispatch').mockImplementation((action) => action as any);
|
||||
|
||||
const result = loadStateFileSync(StateFileKey.var);
|
||||
|
||||
expect(result?.safeMode).toBe(true);
|
||||
expect(dispatchSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
type: 'emhttp/updateEmhttpState',
|
||||
payload: {
|
||||
field: StateFileKey.var,
|
||||
state: expect.objectContaining({ safeMode: true }),
|
||||
},
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('returns null when the states path is missing', () => {
|
||||
vi.spyOn(store, 'getState').mockReturnValue({
|
||||
...baseState,
|
||||
paths: undefined,
|
||||
} as any);
|
||||
const dispatchSpy = vi.spyOn(store, 'dispatch');
|
||||
|
||||
expect(loadStateFileSync(StateFileKey.var)).toBeNull();
|
||||
expect(dispatchSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('returns null when the requested state file cannot be found', () => {
|
||||
vi.spyOn(store, 'getState').mockReturnValue({
|
||||
...baseState,
|
||||
paths: {
|
||||
...baseState.paths,
|
||||
states: tempDir,
|
||||
},
|
||||
});
|
||||
const dispatchSpy = vi.spyOn(store, 'dispatch');
|
||||
|
||||
expect(loadStateFileSync(StateFileKey.var)).toBeNull();
|
||||
expect(dispatchSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
81
api/src/store/services/state-file-loader.ts
Normal file
81
api/src/store/services/state-file-loader.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
import { join } from 'node:path';
|
||||
|
||||
import type { SliceState } from '@app/store/modules/emhttp.js';
|
||||
import type { StateFileToIniParserMap } from '@app/store/types.js';
|
||||
import { parseConfig } from '@app/core/utils/misc/parse-config.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { updateEmhttpState } from '@app/store/modules/emhttp.js';
|
||||
import { parse as parseDevices } from '@app/store/state-parsers/devices.js';
|
||||
import { parse as parseNetwork } from '@app/store/state-parsers/network.js';
|
||||
import { parse as parseNfs } from '@app/store/state-parsers/nfs.js';
|
||||
import { parse as parseNginx } from '@app/store/state-parsers/nginx.js';
|
||||
import { parse as parseShares } from '@app/store/state-parsers/shares.js';
|
||||
import { parse as parseSlots } from '@app/store/state-parsers/slots.js';
|
||||
import { parse as parseSmb } from '@app/store/state-parsers/smb.js';
|
||||
import { parse as parseUsers } from '@app/store/state-parsers/users.js';
|
||||
import { parse as parseVar } from '@app/store/state-parsers/var.js';
|
||||
import { StateFileKey } from '@app/store/types.js';
|
||||
|
||||
type ParserReturnMap = {
|
||||
[StateFileKey.var]: ReturnType<typeof parseVar>;
|
||||
[StateFileKey.devs]: ReturnType<typeof parseDevices>;
|
||||
[StateFileKey.network]: ReturnType<typeof parseNetwork>;
|
||||
[StateFileKey.nginx]: ReturnType<typeof parseNginx>;
|
||||
[StateFileKey.shares]: ReturnType<typeof parseShares>;
|
||||
[StateFileKey.disks]: ReturnType<typeof parseSlots>;
|
||||
[StateFileKey.users]: ReturnType<typeof parseUsers>;
|
||||
[StateFileKey.sec]: ReturnType<typeof parseSmb>;
|
||||
[StateFileKey.sec_nfs]: ReturnType<typeof parseNfs>;
|
||||
};
|
||||
|
||||
const PARSER_MAP: { [K in StateFileKey]: StateFileToIniParserMap[K] } = {
|
||||
[StateFileKey.var]: parseVar,
|
||||
[StateFileKey.devs]: parseDevices,
|
||||
[StateFileKey.network]: parseNetwork,
|
||||
[StateFileKey.nginx]: parseNginx,
|
||||
[StateFileKey.shares]: parseShares,
|
||||
[StateFileKey.disks]: parseSlots,
|
||||
[StateFileKey.users]: parseUsers,
|
||||
[StateFileKey.sec]: parseSmb,
|
||||
[StateFileKey.sec_nfs]: parseNfs,
|
||||
};
|
||||
|
||||
/**
|
||||
* Synchronously loads an emhttp state file, updates the Redux store slice, and returns the parsed state.
|
||||
*
|
||||
* Designed for bootstrap contexts (CLI, plugin loading, etc.) where dispatching the async thunks is
|
||||
* impractical but we still need authoritative emhttp state from disk.
|
||||
*/
|
||||
export const loadStateFileSync = <K extends StateFileKey>(
|
||||
stateFileKey: K
|
||||
): ParserReturnMap[K] | null => {
|
||||
const state = store.getState();
|
||||
const statesDirectory = state.paths?.states;
|
||||
|
||||
if (!statesDirectory) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const filePath = join(statesDirectory, `${stateFileKey}.ini`);
|
||||
|
||||
try {
|
||||
const parser = PARSER_MAP[stateFileKey] as StateFileToIniParserMap[K];
|
||||
const rawConfig = parseConfig<Record<string, unknown>>({
|
||||
filePath,
|
||||
type: 'ini',
|
||||
});
|
||||
const config = rawConfig as Parameters<StateFileToIniParserMap[K]>[0];
|
||||
const parsed = (parser as (input: any) => ParserReturnMap[K])(config);
|
||||
|
||||
store.dispatch(
|
||||
updateEmhttpState({
|
||||
field: stateFileKey,
|
||||
state: parsed as Partial<SliceState[keyof SliceState]>,
|
||||
})
|
||||
);
|
||||
|
||||
return parsed;
|
||||
} catch (error) {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
@@ -1,6 +1,10 @@
|
||||
import type { StateFileToIniParserMap } from '@app/store/types.js';
|
||||
import { type IniStringBoolean, type IniStringBooleanOrAuto } from '@app/core/types/ini.js';
|
||||
import { toNumber } from '@app/core/utils/index.js';
|
||||
import {
|
||||
iniBooleanOrAutoToJsBoolean,
|
||||
iniBooleanToJsBoolean,
|
||||
} from '@app/core/utils/parsers/ini-boolean-parser.js';
|
||||
import { ArrayState } from '@app/unraid-api/graph/resolvers/array/array.model.js';
|
||||
import { DiskFsType } from '@app/unraid-api/graph/resolvers/disks/disks.model.js';
|
||||
import {
|
||||
@@ -157,36 +161,6 @@ export type VarIni = {
|
||||
useUpnp: IniStringBoolean;
|
||||
};
|
||||
|
||||
const iniBooleanToJsBoolean = (value: string, defaultValue?: boolean) => {
|
||||
if (value === 'no' || value === 'false') {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (value === 'yes' || value === 'true') {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (defaultValue !== undefined) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
throw new Error(`Value "${value}" is not false/true or no/yes.`);
|
||||
};
|
||||
|
||||
const iniBooleanOrAutoToJsBoolean = (value: IniStringBooleanOrAuto) => {
|
||||
try {
|
||||
// Either it'll return true/false or throw
|
||||
return iniBooleanToJsBoolean(value as IniStringBoolean);
|
||||
} catch {
|
||||
// Auto or null
|
||||
if (value === 'auto') {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(`Value "${value as string}" is not auto/no/yes.`);
|
||||
};
|
||||
|
||||
const safeParseMdState = (mdState: string | undefined): ArrayState => {
|
||||
if (!mdState || typeof mdState !== 'string') {
|
||||
return ArrayState.STOPPED;
|
||||
@@ -210,7 +184,7 @@ export const parse: StateFileToIniParserMap['var'] = (iniFile) => {
|
||||
...iniFile,
|
||||
defaultFsType: DiskFsType[iniFile.defaultFsType] || DiskFsType.XFS,
|
||||
mdState: safeParseMdState(iniFile.mdState),
|
||||
bindMgt: iniBooleanOrAutoToJsBoolean(iniFile.bindMgt),
|
||||
bindMgt: iniBooleanOrAutoToJsBoolean(iniFile.bindMgt) ?? null,
|
||||
cacheNumDevices: toNumber(iniFile.cacheNumDevices),
|
||||
cacheSbNumDisks: toNumber(iniFile.cacheSbNumDisks),
|
||||
configValid: iniBooleanToJsBoolean(iniFile.configValid, false),
|
||||
@@ -221,8 +195,8 @@ export const parse: StateFileToIniParserMap['var'] = (iniFile) => {
|
||||
fsCopyPrcnt: toNumber(iniFile.fsCopyPrcnt),
|
||||
fsNumMounted: toNumber(iniFile.fsNumMounted),
|
||||
fsNumUnmountable: toNumber(iniFile.fsNumUnmountable),
|
||||
hideDotFiles: iniBooleanToJsBoolean(iniFile.hideDotFiles),
|
||||
localMaster: iniBooleanToJsBoolean(iniFile.localMaster),
|
||||
hideDotFiles: iniBooleanToJsBoolean(iniFile.hideDotFiles, false),
|
||||
localMaster: iniBooleanToJsBoolean(iniFile.localMaster, false),
|
||||
maxArraysz: toNumber(iniFile.maxArraysz),
|
||||
maxCachesz: toNumber(iniFile.maxCachesz),
|
||||
mdNumDisabled: toNumber(iniFile.mdNumDisabled),
|
||||
@@ -254,34 +228,34 @@ export const parse: StateFileToIniParserMap['var'] = (iniFile) => {
|
||||
regState:
|
||||
RegistrationState[(iniFile.regCheck || iniFile.regTy || '').toUpperCase()] ??
|
||||
RegistrationState.EGUID,
|
||||
safeMode: iniBooleanToJsBoolean(iniFile.safeMode),
|
||||
sbClean: iniBooleanToJsBoolean(iniFile.sbClean),
|
||||
safeMode: iniBooleanToJsBoolean(iniFile.safeMode, false),
|
||||
sbClean: iniBooleanToJsBoolean(iniFile.sbClean, false),
|
||||
sbEvents: toNumber(iniFile.sbEvents),
|
||||
sbNumDisks: toNumber(iniFile.sbNumDisks),
|
||||
sbSynced: toNumber(iniFile.sbSynced),
|
||||
sbSynced2: toNumber(iniFile.sbSynced2),
|
||||
sbSyncErrs: toNumber(iniFile.sbSyncErrs),
|
||||
shareAvahiEnabled: iniBooleanToJsBoolean(iniFile.shareAvahiEnabled),
|
||||
shareCacheEnabled: iniBooleanToJsBoolean(iniFile.shareCacheEnabled),
|
||||
shareAvahiEnabled: iniBooleanToJsBoolean(iniFile.shareAvahiEnabled, false),
|
||||
shareCacheEnabled: iniBooleanToJsBoolean(iniFile.shareCacheEnabled, false),
|
||||
shareCount: toNumber(iniFile.shareCount),
|
||||
shareMoverActive: iniBooleanToJsBoolean(iniFile.shareMoverActive),
|
||||
shareMoverLogging: iniBooleanToJsBoolean(iniFile.shareMoverLogging),
|
||||
shareMoverActive: iniBooleanToJsBoolean(iniFile.shareMoverActive, false),
|
||||
shareMoverLogging: iniBooleanToJsBoolean(iniFile.shareMoverLogging, false),
|
||||
shareNfsCount: toNumber(iniFile.shareNfsCount),
|
||||
shareNfsEnabled: iniBooleanToJsBoolean(iniFile.shareNfsEnabled),
|
||||
shareNfsEnabled: iniBooleanToJsBoolean(iniFile.shareNfsEnabled, false),
|
||||
shareSmbCount: toNumber(iniFile.shareSmbCount),
|
||||
shareSmbEnabled: ['yes', 'ads'].includes(iniFile.shareSmbEnabled),
|
||||
shareSmbMode: iniFile.shareSmbEnabled === 'ads' ? 'active-directory' : 'workgroup',
|
||||
shutdownTimeout: toNumber(iniFile.shutdownTimeout),
|
||||
spindownDelay: toNumber(iniFile.spindownDelay),
|
||||
spinupGroups: iniBooleanToJsBoolean(iniFile.spinupGroups),
|
||||
startArray: iniBooleanToJsBoolean(iniFile.startArray),
|
||||
spinupGroups: iniBooleanToJsBoolean(iniFile.spinupGroups, false),
|
||||
startArray: iniBooleanToJsBoolean(iniFile.startArray, false),
|
||||
sysArraySlots: toNumber(iniFile.sysArraySlots),
|
||||
sysCacheSlots: toNumber(iniFile.sysCacheSlots),
|
||||
sysFlashSlots: toNumber(iniFile.sysFlashSlots),
|
||||
useNtp: iniBooleanToJsBoolean(iniFile.useNtp),
|
||||
useSsh: iniBooleanToJsBoolean(iniFile.useSsh),
|
||||
useSsl: iniBooleanOrAutoToJsBoolean(iniFile.useSsl),
|
||||
useTelnet: iniBooleanToJsBoolean(iniFile.useTelnet),
|
||||
useUpnp: iniBooleanToJsBoolean(iniFile.useUpnp),
|
||||
useNtp: iniBooleanToJsBoolean(iniFile.useNtp, false),
|
||||
useSsh: iniBooleanToJsBoolean(iniFile.useSsh, false),
|
||||
useSsl: iniBooleanOrAutoToJsBoolean(iniFile.useSsl) ?? null,
|
||||
useTelnet: iniBooleanToJsBoolean(iniFile.useTelnet, false),
|
||||
useUpnp: iniBooleanToJsBoolean(iniFile.useUpnp, false),
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
import { watch } from 'chokidar';
|
||||
|
||||
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
|
||||
import { getters, store } from '@app/store/index.js';
|
||||
|
||||
export const setupDynamixConfigWatch = () => {
|
||||
const configPath = getters.paths()?.['dynamix-config'];
|
||||
|
||||
// Update store when cfg changes
|
||||
watch(configPath, {
|
||||
persistent: true,
|
||||
ignoreInitial: true,
|
||||
}).on('change', async () => {
|
||||
// Load updated dynamix config file into store
|
||||
await store.dispatch(loadDynamixConfigFile());
|
||||
});
|
||||
};
|
||||
40
api/src/types/jsonforms-i18n.d.ts
vendored
Normal file
40
api/src/types/jsonforms-i18n.d.ts
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
import '@jsonforms/core/lib/models/jsonSchema4';
|
||||
import '@jsonforms/core/lib/models/jsonSchema7';
|
||||
import '@jsonforms/core/src/models/jsonSchema4';
|
||||
import '@jsonforms/core/src/models/jsonSchema7';
|
||||
|
||||
declare module '@jsonforms/core/lib/models/jsonSchema4' {
|
||||
interface JsonSchema4 {
|
||||
i18n?: string;
|
||||
}
|
||||
}
|
||||
|
||||
declare module '@jsonforms/core/lib/models/jsonSchema7' {
|
||||
interface JsonSchema7 {
|
||||
i18n?: string;
|
||||
}
|
||||
}
|
||||
|
||||
declare module '@jsonforms/core/src/models/jsonSchema4' {
|
||||
interface JsonSchema4 {
|
||||
i18n?: string;
|
||||
}
|
||||
}
|
||||
|
||||
declare module '@jsonforms/core/src/models/jsonSchema7' {
|
||||
interface JsonSchema7 {
|
||||
i18n?: string;
|
||||
}
|
||||
}
|
||||
|
||||
declare module '@jsonforms/core/lib/models/jsonSchema4.js' {
|
||||
interface JsonSchema4 {
|
||||
i18n?: string;
|
||||
}
|
||||
}
|
||||
|
||||
declare module '@jsonforms/core/lib/models/jsonSchema7.js' {
|
||||
interface JsonSchema7 {
|
||||
i18n?: string;
|
||||
}
|
||||
}
|
||||
@@ -6,8 +6,7 @@ import { AuthZGuard } from 'nest-authz';
|
||||
import request from 'supertest';
|
||||
import { afterAll, beforeAll, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { loadDynamixConfig, store } from '@app/store/index.js';
|
||||
import { loadStateFiles } from '@app/store/modules/emhttp.js';
|
||||
import { AppModule } from '@app/unraid-api/app/app.module.js';
|
||||
import { AuthService } from '@app/unraid-api/auth/auth.service.js';
|
||||
@@ -111,8 +110,8 @@ describe('AppModule Integration Tests', () => {
|
||||
|
||||
beforeAll(async () => {
|
||||
// Initialize the dynamix config and state files before creating the module
|
||||
await store.dispatch(loadDynamixConfigFile());
|
||||
await store.dispatch(loadStateFiles());
|
||||
loadDynamixConfig();
|
||||
|
||||
// Debug: Log the CSRF token from the store
|
||||
const { getters } = await import('@app/store/index.js');
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { CacheModule } from '@nestjs/cache-manager';
|
||||
import { ConfigModule } from '@nestjs/config';
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import { describe, expect, it } from 'vitest';
|
||||
@@ -10,7 +11,11 @@ describe('Module Dependencies Integration', () => {
|
||||
let module;
|
||||
try {
|
||||
module = await Test.createTestingModule({
|
||||
imports: [CacheModule.register({ isGlobal: true }), RestModule],
|
||||
imports: [
|
||||
ConfigModule.forRoot({ ignoreEnvFile: true, isGlobal: true }),
|
||||
CacheModule.register({ isGlobal: true }),
|
||||
RestModule,
|
||||
],
|
||||
}).compile();
|
||||
|
||||
expect(module).toBeDefined();
|
||||
|
||||
@@ -8,6 +8,7 @@ import { AuthService } from '@app/unraid-api/auth/auth.service.js';
|
||||
import { CasbinModule } from '@app/unraid-api/auth/casbin/casbin.module.js';
|
||||
import { CasbinService } from '@app/unraid-api/auth/casbin/casbin.service.js';
|
||||
import { BASE_POLICY, CASBIN_MODEL } from '@app/unraid-api/auth/casbin/index.js';
|
||||
import { resolveSubjectFromUser } from '@app/unraid-api/auth/casbin/resolve-subject.util.js';
|
||||
import { CookieService, SESSION_COOKIE_CONFIG } from '@app/unraid-api/auth/cookie.service.js';
|
||||
import { UserCookieStrategy } from '@app/unraid-api/auth/cookie.strategy.js';
|
||||
import { ServerHeaderStrategy } from '@app/unraid-api/auth/header.strategy.js';
|
||||
@@ -28,6 +29,7 @@ import { getRequest } from '@app/utils.js';
|
||||
CasbinModule,
|
||||
AuthZModule.register({
|
||||
imports: [CasbinModule],
|
||||
enablePossession: false,
|
||||
enforcerProvider: {
|
||||
provide: AUTHZ_ENFORCER,
|
||||
useFactory: async (casbinService: CasbinService) => {
|
||||
@@ -40,13 +42,7 @@ import { getRequest } from '@app/utils.js';
|
||||
|
||||
try {
|
||||
const request = getRequest(ctx);
|
||||
const roles = request?.user?.roles || [];
|
||||
|
||||
if (!Array.isArray(roles)) {
|
||||
throw new UnauthorizedException('User roles must be an array');
|
||||
}
|
||||
|
||||
return roles.join(',');
|
||||
return resolveSubjectFromUser(request?.user);
|
||||
} catch (error) {
|
||||
logger.error('Failed to extract user context', error);
|
||||
throw new UnauthorizedException('Failed to authenticate user');
|
||||
|
||||
133
api/src/unraid-api/auth/casbin/authz.guard.integration.spec.ts
Normal file
133
api/src/unraid-api/auth/casbin/authz.guard.integration.spec.ts
Normal file
@@ -0,0 +1,133 @@
|
||||
import { ExecutionContext, Type } from '@nestjs/common';
|
||||
import { Reflector } from '@nestjs/core';
|
||||
import { ExecutionContextHost } from '@nestjs/core/helpers/execution-context-host.js';
|
||||
|
||||
import type { Enforcer } from 'casbin';
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { AuthZGuard, BatchApproval } from 'nest-authz';
|
||||
import { beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
import { CasbinService } from '@app/unraid-api/auth/casbin/casbin.service.js';
|
||||
import { CASBIN_MODEL } from '@app/unraid-api/auth/casbin/model.js';
|
||||
import { BASE_POLICY } from '@app/unraid-api/auth/casbin/policy.js';
|
||||
import { resolveSubjectFromUser } from '@app/unraid-api/auth/casbin/resolve-subject.util.js';
|
||||
import { DockerMutationsResolver } from '@app/unraid-api/graph/resolvers/docker/docker.mutations.resolver.js';
|
||||
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
|
||||
import { VmMutationsResolver } from '@app/unraid-api/graph/resolvers/vms/vms.mutations.resolver.js';
|
||||
import { MeResolver } from '@app/unraid-api/graph/user/user.resolver.js';
|
||||
import { getRequest } from '@app/utils.js';
|
||||
|
||||
type Handler = (...args: any[]) => unknown;
|
||||
|
||||
type TestUser = {
|
||||
id?: string;
|
||||
roles?: Role[];
|
||||
};
|
||||
|
||||
type TestRequest = {
|
||||
user?: TestUser;
|
||||
};
|
||||
|
||||
function createExecutionContext(
|
||||
handler: Handler,
|
||||
classRef: Type<unknown> | null,
|
||||
roles: Role[],
|
||||
userId = 'api-key-viewer'
|
||||
): ExecutionContext {
|
||||
const request: TestRequest = {
|
||||
user: {
|
||||
id: userId,
|
||||
roles: [...roles],
|
||||
},
|
||||
};
|
||||
|
||||
const graphqlContextHost = new ExecutionContextHost(
|
||||
[undefined, undefined, { req: request }, undefined],
|
||||
classRef,
|
||||
handler
|
||||
);
|
||||
|
||||
graphqlContextHost.setType('graphql');
|
||||
|
||||
return graphqlContextHost as unknown as ExecutionContext;
|
||||
}
|
||||
|
||||
describe('AuthZGuard + Casbin policies', () => {
|
||||
let guard: AuthZGuard;
|
||||
let enforcer: Enforcer;
|
||||
|
||||
beforeAll(async () => {
|
||||
const casbinService = new CasbinService();
|
||||
enforcer = await casbinService.initializeEnforcer(CASBIN_MODEL, BASE_POLICY);
|
||||
|
||||
await enforcer.addGroupingPolicy('api-key-viewer', Role.VIEWER);
|
||||
await enforcer.addGroupingPolicy('api-key-admin', Role.ADMIN);
|
||||
|
||||
guard = new AuthZGuard(new Reflector(), enforcer, {
|
||||
enablePossession: false,
|
||||
batchApproval: BatchApproval.ALL,
|
||||
userFromContext: (ctx: ExecutionContext) => {
|
||||
const request = getRequest(ctx) as TestRequest | undefined;
|
||||
|
||||
return resolveSubjectFromUser(request?.user);
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('denies viewer role from stopping docker containers', async () => {
|
||||
const context = createExecutionContext(
|
||||
DockerMutationsResolver.prototype.stop,
|
||||
DockerMutationsResolver,
|
||||
[Role.VIEWER],
|
||||
'api-key-viewer'
|
||||
);
|
||||
|
||||
await expect(guard.canActivate(context)).resolves.toBe(false);
|
||||
});
|
||||
|
||||
it('allows admin role to stop docker containers', async () => {
|
||||
const context = createExecutionContext(
|
||||
DockerMutationsResolver.prototype.stop,
|
||||
DockerMutationsResolver,
|
||||
[Role.ADMIN],
|
||||
'api-key-admin'
|
||||
);
|
||||
|
||||
await expect(guard.canActivate(context)).resolves.toBe(true);
|
||||
});
|
||||
|
||||
it('denies viewer role from stopping virtual machines', async () => {
|
||||
const context = createExecutionContext(
|
||||
VmMutationsResolver.prototype.stop,
|
||||
VmMutationsResolver,
|
||||
[Role.VIEWER],
|
||||
'api-key-viewer'
|
||||
);
|
||||
|
||||
await expect(guard.canActivate(context)).resolves.toBe(false);
|
||||
});
|
||||
|
||||
it('allows viewer role to read docker data', async () => {
|
||||
const context = createExecutionContext(
|
||||
DockerResolver.prototype.containers,
|
||||
DockerResolver,
|
||||
[Role.VIEWER],
|
||||
'api-key-viewer'
|
||||
);
|
||||
|
||||
await expect(guard.canActivate(context)).resolves.toBe(true);
|
||||
});
|
||||
|
||||
it('allows API key with explicit permission to access ME resource', async () => {
|
||||
await enforcer.addPolicy('api-key-custom', Resource.ME, AuthAction.READ_ANY);
|
||||
|
||||
const context = createExecutionContext(
|
||||
MeResolver.prototype.me,
|
||||
MeResolver,
|
||||
[],
|
||||
'api-key-custom'
|
||||
);
|
||||
|
||||
await expect(guard.canActivate(context)).resolves.toBe(true);
|
||||
});
|
||||
});
|
||||
43
api/src/unraid-api/auth/casbin/resolve-subject.util.spec.ts
Normal file
43
api/src/unraid-api/auth/casbin/resolve-subject.util.spec.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { UnauthorizedException } from '@nestjs/common';
|
||||
|
||||
import { describe, expect, it } from 'vitest';
|
||||
|
||||
import { resolveSubjectFromUser } from '@app/unraid-api/auth/casbin/resolve-subject.util.js';
|
||||
|
||||
describe('resolveSubjectFromUser', () => {
|
||||
it('returns trimmed user id when available', () => {
|
||||
const subject = resolveSubjectFromUser({ id: ' user-123 ', roles: ['viewer'] });
|
||||
|
||||
expect(subject).toBe('user-123');
|
||||
});
|
||||
|
||||
it('falls back to a single non-empty role', () => {
|
||||
const subject = resolveSubjectFromUser({ roles: [' viewer '] });
|
||||
|
||||
expect(subject).toBe('viewer');
|
||||
});
|
||||
|
||||
it('throws when role list is empty', () => {
|
||||
expect(() => resolveSubjectFromUser({ roles: [] })).toThrow(UnauthorizedException);
|
||||
});
|
||||
|
||||
it('throws when multiple roles are present', () => {
|
||||
expect(() => resolveSubjectFromUser({ roles: ['viewer', 'admin'] })).toThrow(
|
||||
UnauthorizedException
|
||||
);
|
||||
});
|
||||
|
||||
it('throws when roles is not an array', () => {
|
||||
expect(() => resolveSubjectFromUser({ roles: 'viewer' as unknown })).toThrow(
|
||||
UnauthorizedException
|
||||
);
|
||||
});
|
||||
|
||||
it('throws when role subject is blank', () => {
|
||||
expect(() => resolveSubjectFromUser({ roles: [' '] })).toThrow(UnauthorizedException);
|
||||
});
|
||||
|
||||
it('throws when user is missing', () => {
|
||||
expect(() => resolveSubjectFromUser(undefined)).toThrow(UnauthorizedException);
|
||||
});
|
||||
});
|
||||
46
api/src/unraid-api/auth/casbin/resolve-subject.util.ts
Normal file
46
api/src/unraid-api/auth/casbin/resolve-subject.util.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
import { UnauthorizedException } from '@nestjs/common';
|
||||
|
||||
type CasbinUser = {
|
||||
id?: unknown;
|
||||
roles?: unknown;
|
||||
};
|
||||
|
||||
/**
|
||||
* Determine the Casbin subject for a request user.
|
||||
*
|
||||
* Prefers a non-empty `user.id`, otherwise falls back to a single non-empty role.
|
||||
* Throws when the subject cannot be resolved.
|
||||
*/
|
||||
export function resolveSubjectFromUser(user: CasbinUser | undefined): string {
|
||||
if (!user) {
|
||||
throw new UnauthorizedException('Request user context missing');
|
||||
}
|
||||
|
||||
const roles = user.roles ?? [];
|
||||
|
||||
if (!Array.isArray(roles)) {
|
||||
throw new UnauthorizedException('User roles must be an array');
|
||||
}
|
||||
|
||||
const userId = typeof user.id === 'string' ? user.id.trim() : '';
|
||||
|
||||
if (userId.length > 0) {
|
||||
return userId;
|
||||
}
|
||||
|
||||
if (roles.length === 1) {
|
||||
const [role] = roles;
|
||||
|
||||
if (typeof role === 'string') {
|
||||
const trimmedRole = role.trim();
|
||||
|
||||
if (trimmedRole.length > 0) {
|
||||
return trimmedRole;
|
||||
}
|
||||
}
|
||||
|
||||
throw new UnauthorizedException('Role subject must be a non-empty string');
|
||||
}
|
||||
|
||||
throw new UnauthorizedException('Unable to determine subject from user context');
|
||||
}
|
||||
@@ -36,6 +36,7 @@ const mockPluginManagementService = {
|
||||
addPlugin: vi.fn(),
|
||||
addBundledPlugin: vi.fn(),
|
||||
removePlugin: vi.fn(),
|
||||
removePluginConfigOnly: vi.fn(),
|
||||
removeBundledPlugin: vi.fn(),
|
||||
plugins: [] as string[],
|
||||
};
|
||||
@@ -147,6 +148,7 @@ describe('Plugin Commands', () => {
|
||||
'@unraid/plugin-example',
|
||||
'@unraid/plugin-test'
|
||||
);
|
||||
expect(mockPluginManagementService.removePluginConfigOnly).not.toHaveBeenCalled();
|
||||
expect(mockLogger.log).toHaveBeenCalledWith('Removed plugin @unraid/plugin-example');
|
||||
expect(mockLogger.log).toHaveBeenCalledWith('Removed plugin @unraid/plugin-test');
|
||||
expect(mockApiConfigPersistence.persist).toHaveBeenCalled();
|
||||
@@ -178,9 +180,72 @@ describe('Plugin Commands', () => {
|
||||
expect(mockPluginManagementService.removePlugin).toHaveBeenCalledWith(
|
||||
'@unraid/plugin-example'
|
||||
);
|
||||
expect(mockPluginManagementService.removePluginConfigOnly).not.toHaveBeenCalled();
|
||||
expect(mockApiConfigPersistence.persist).toHaveBeenCalled();
|
||||
expect(mockRestartCommand.run).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should bypass npm uninstall when bypass flag is provided', async () => {
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
plugins: ['@unraid/plugin-example'],
|
||||
restart: true,
|
||||
bypassNpm: true,
|
||||
});
|
||||
|
||||
await command.run([], { restart: true, bypassNpm: true });
|
||||
|
||||
expect(mockPluginManagementService.removePluginConfigOnly).toHaveBeenCalledWith(
|
||||
'@unraid/plugin-example'
|
||||
);
|
||||
expect(mockPluginManagementService.removePlugin).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should preserve cli flags when prompt supplies plugins', async () => {
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
plugins: ['@unraid/plugin-example'],
|
||||
});
|
||||
|
||||
await command.run([], { restart: false, bypassNpm: true });
|
||||
|
||||
expect(mockPluginManagementService.removePluginConfigOnly).toHaveBeenCalledWith(
|
||||
'@unraid/plugin-example'
|
||||
);
|
||||
expect(mockPluginManagementService.removePlugin).not.toHaveBeenCalled();
|
||||
expect(mockRestartCommand.run).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should honor prompt restart value when cli flag not provided', async () => {
|
||||
mockInquirerService.prompt.mockResolvedValue({
|
||||
plugins: ['@unraid/plugin-example'],
|
||||
restart: false,
|
||||
});
|
||||
|
||||
await command.run([], {});
|
||||
|
||||
expect(mockPluginManagementService.removePlugin).toHaveBeenCalledWith(
|
||||
'@unraid/plugin-example'
|
||||
);
|
||||
expect(mockRestartCommand.run).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should respect passed params and skip inquirer', async () => {
|
||||
await command.run(['@unraid/plugin-example'], { restart: true, bypassNpm: false });
|
||||
|
||||
expect(mockInquirerService.prompt).not.toHaveBeenCalled();
|
||||
expect(mockPluginManagementService.removePlugin).toHaveBeenCalledWith(
|
||||
'@unraid/plugin-example'
|
||||
);
|
||||
});
|
||||
|
||||
it('should bypass npm when flag provided with passed params', async () => {
|
||||
await command.run(['@unraid/plugin-example'], { restart: true, bypassNpm: true });
|
||||
|
||||
expect(mockInquirerService.prompt).not.toHaveBeenCalled();
|
||||
expect(mockPluginManagementService.removePluginConfigOnly).toHaveBeenCalledWith(
|
||||
'@unraid/plugin-example'
|
||||
);
|
||||
expect(mockPluginManagementService.removePlugin).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('ListPluginCommand', () => {
|
||||
|
||||
@@ -74,13 +74,15 @@ export class InstallPluginCommand extends CommandRunner {
|
||||
|
||||
interface RemovePluginCommandOptions {
|
||||
plugins?: string[];
|
||||
restart: boolean;
|
||||
restart?: boolean;
|
||||
bypassNpm?: boolean;
|
||||
}
|
||||
|
||||
@SubCommand({
|
||||
name: 'remove',
|
||||
aliases: ['rm'],
|
||||
description: 'Remove plugin peer dependencies.',
|
||||
arguments: '[plugins...]',
|
||||
})
|
||||
export class RemovePluginCommand extends CommandRunner {
|
||||
constructor(
|
||||
@@ -93,9 +95,83 @@ export class RemovePluginCommand extends CommandRunner {
|
||||
super();
|
||||
}
|
||||
|
||||
async run(_passedParams: string[], options?: RemovePluginCommandOptions): Promise<void> {
|
||||
async run(passedParams: string[], options?: RemovePluginCommandOptions): Promise<void> {
|
||||
const cliBypass = options?.bypassNpm;
|
||||
const cliRestart = options?.restart;
|
||||
const mergedOptions: RemovePluginCommandOptions = {
|
||||
bypassNpm: cliBypass ?? false,
|
||||
restart: cliRestart ?? true,
|
||||
plugins: passedParams.length > 0 ? passedParams : options?.plugins,
|
||||
};
|
||||
|
||||
let resolvedOptions = mergedOptions;
|
||||
if (!mergedOptions.plugins?.length) {
|
||||
const promptOptions = await this.promptForPlugins(mergedOptions);
|
||||
if (!promptOptions) {
|
||||
return;
|
||||
}
|
||||
resolvedOptions = {
|
||||
// precedence: cli > prompt > default (fallback)
|
||||
bypassNpm: cliBypass ?? promptOptions.bypassNpm ?? mergedOptions.bypassNpm,
|
||||
restart: cliRestart ?? promptOptions.restart ?? mergedOptions.restart,
|
||||
// precedence: prompt > default (fallback)
|
||||
plugins: promptOptions.plugins ?? mergedOptions.plugins,
|
||||
};
|
||||
}
|
||||
|
||||
if (!resolvedOptions.plugins?.length) {
|
||||
this.logService.warn('No plugins selected for removal.');
|
||||
return;
|
||||
}
|
||||
|
||||
if (resolvedOptions.bypassNpm) {
|
||||
await this.pluginManagementService.removePluginConfigOnly(...resolvedOptions.plugins);
|
||||
} else {
|
||||
await this.pluginManagementService.removePlugin(...resolvedOptions.plugins);
|
||||
}
|
||||
for (const plugin of resolvedOptions.plugins) {
|
||||
this.logService.log(`Removed plugin ${plugin}`);
|
||||
}
|
||||
await this.apiConfigPersistence.persist();
|
||||
|
||||
if (resolvedOptions.restart) {
|
||||
await this.restartCommand.run();
|
||||
}
|
||||
}
|
||||
|
||||
@Option({
|
||||
flags: '--no-restart',
|
||||
description: 'do NOT restart the service after deploy',
|
||||
defaultValue: true,
|
||||
})
|
||||
parseRestart(): boolean {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Option({
|
||||
flags: '-b, --bypass-npm',
|
||||
description: 'Bypass npm uninstall and only update the config',
|
||||
defaultValue: false,
|
||||
name: 'bypassNpm',
|
||||
})
|
||||
parseBypass(): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Option({
|
||||
flags: '--npm',
|
||||
description: 'Run npm uninstall for unbundled plugins (default behavior)',
|
||||
name: 'bypassNpm',
|
||||
})
|
||||
parseRunNpm(): boolean {
|
||||
return false;
|
||||
}
|
||||
|
||||
private async promptForPlugins(
|
||||
initialOptions: RemovePluginCommandOptions
|
||||
): Promise<RemovePluginCommandOptions | undefined> {
|
||||
try {
|
||||
options = await this.inquirerService.prompt(RemovePluginQuestionSet.name, options);
|
||||
return await this.inquirerService.prompt(RemovePluginQuestionSet.name, initialOptions);
|
||||
} catch (error) {
|
||||
if (error instanceof NoPluginsFoundError) {
|
||||
this.logService.error(error.message);
|
||||
@@ -108,30 +184,6 @@ export class RemovePluginCommand extends CommandRunner {
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
if (!options.plugins || options.plugins.length === 0) {
|
||||
this.logService.warn('No plugins selected for removal.');
|
||||
return;
|
||||
}
|
||||
|
||||
await this.pluginManagementService.removePlugin(...options.plugins);
|
||||
for (const plugin of options.plugins) {
|
||||
this.logService.log(`Removed plugin ${plugin}`);
|
||||
}
|
||||
await this.apiConfigPersistence.persist();
|
||||
|
||||
if (options.restart) {
|
||||
await this.restartCommand.run();
|
||||
}
|
||||
}
|
||||
|
||||
@Option({
|
||||
flags: '--no-restart',
|
||||
description: 'do NOT restart the service after deploy',
|
||||
defaultValue: true,
|
||||
})
|
||||
parseRestart(): boolean {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -58,7 +58,8 @@ export class PM2Service {
|
||||
...(needsPathUpdate && { PATH: finalPath }),
|
||||
};
|
||||
|
||||
const runCommand = () => execa(PM2_PATH, [...args], execOptions satisfies Options);
|
||||
const pm2Args = args.some((arg) => arg === '--no-color') ? args : ['--no-color', ...args];
|
||||
const runCommand = () => execa(PM2_PATH, pm2Args, execOptions satisfies Options);
|
||||
if (raw) {
|
||||
return runCommand();
|
||||
}
|
||||
|
||||
@@ -6,6 +6,7 @@ import type { ApiConfig } from '@unraid/shared/services/api-config.js';
|
||||
import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';
|
||||
import { csvStringToArray } from '@unraid/shared/util/data.js';
|
||||
|
||||
import { isConnectPluginInstalled } from '@app/connect-plugin-cleanup.js';
|
||||
import { API_VERSION, PATHS_CONFIG_MODULES } from '@app/environment.js';
|
||||
|
||||
export { type ApiConfig };
|
||||
@@ -29,6 +30,13 @@ export const loadApiConfig = async () => {
|
||||
const apiHandler = new ApiConfigPersistence(new ConfigService()).getFileHandler();
|
||||
|
||||
const diskConfig: Partial<ApiConfig> = await apiHandler.loadConfig();
|
||||
// Hack: cleanup stale connect plugin entry if necessary
|
||||
if (!isConnectPluginInstalled()) {
|
||||
diskConfig.plugins = diskConfig.plugins?.filter(
|
||||
(plugin) => plugin !== 'unraid-api-plugin-connect'
|
||||
);
|
||||
await apiHandler.writeConfigFile(diskConfig as ApiConfig);
|
||||
}
|
||||
|
||||
return {
|
||||
...defaultConfig,
|
||||
|
||||
@@ -49,6 +49,7 @@ import { PluginModule } from '@app/unraid-api/plugin/plugin.module.js';
|
||||
extra,
|
||||
};
|
||||
},
|
||||
fieldResolverEnhancers: ['guards'],
|
||||
plugins: [
|
||||
createDynamicIntrospectionPlugin(isSandboxEnabled),
|
||||
createSandboxPlugin(),
|
||||
|
||||
@@ -12,6 +12,24 @@ import {
|
||||
createSimpleLabeledControl,
|
||||
} from '@app/unraid-api/graph/utils/form-utils.js';
|
||||
|
||||
const API_KEY_I18N = {
|
||||
name: 'jsonforms.apiKey.name',
|
||||
description: 'jsonforms.apiKey.description',
|
||||
roles: 'jsonforms.apiKey.roles',
|
||||
permissionPresets: 'jsonforms.apiKey.permissionPresets',
|
||||
customPermissions: {
|
||||
root: 'jsonforms.apiKey.customPermissions',
|
||||
resources: 'jsonforms.apiKey.customPermissions.resources',
|
||||
actions: 'jsonforms.apiKey.customPermissions.actions',
|
||||
},
|
||||
permissions: {
|
||||
header: 'jsonforms.apiKey.permissions.header',
|
||||
description: 'jsonforms.apiKey.permissions.description',
|
||||
subheader: 'jsonforms.apiKey.permissions.subheader',
|
||||
help: 'jsonforms.apiKey.permissions.help',
|
||||
},
|
||||
} as const;
|
||||
|
||||
// Helper to get GraphQL enum names for JSON Schema
|
||||
// GraphQL expects the enum names (keys) not the values
|
||||
function getAuthActionEnumNames(): string[] {
|
||||
@@ -82,6 +100,7 @@ export class ApiKeyFormService {
|
||||
properties: {
|
||||
name: {
|
||||
type: 'string',
|
||||
i18n: API_KEY_I18N.name,
|
||||
title: 'API Key Name',
|
||||
description: 'A descriptive name for this API key',
|
||||
minLength: 1,
|
||||
@@ -89,12 +108,14 @@ export class ApiKeyFormService {
|
||||
},
|
||||
description: {
|
||||
type: 'string',
|
||||
i18n: API_KEY_I18N.description,
|
||||
title: 'Description',
|
||||
description: 'Optional description of what this key is used for',
|
||||
maxLength: 500,
|
||||
},
|
||||
roles: {
|
||||
type: 'array',
|
||||
i18n: API_KEY_I18N.roles,
|
||||
title: 'Roles',
|
||||
description: 'Select one or more roles to grant pre-defined permission sets',
|
||||
items: {
|
||||
@@ -105,6 +126,7 @@ export class ApiKeyFormService {
|
||||
},
|
||||
permissionPresets: {
|
||||
type: 'string',
|
||||
i18n: API_KEY_I18N.permissionPresets,
|
||||
title: 'Add Permission Preset',
|
||||
description: 'Quick add common permission sets',
|
||||
enum: [
|
||||
@@ -119,6 +141,7 @@ export class ApiKeyFormService {
|
||||
},
|
||||
customPermissions: {
|
||||
type: 'array',
|
||||
i18n: API_KEY_I18N.customPermissions.root,
|
||||
title: 'Permissions',
|
||||
description: 'Configure specific permissions',
|
||||
items: {
|
||||
@@ -126,6 +149,7 @@ export class ApiKeyFormService {
|
||||
properties: {
|
||||
resources: {
|
||||
type: 'array',
|
||||
i18n: API_KEY_I18N.customPermissions.resources,
|
||||
title: 'Resources',
|
||||
items: {
|
||||
type: 'string',
|
||||
@@ -137,6 +161,7 @@ export class ApiKeyFormService {
|
||||
},
|
||||
actions: {
|
||||
type: 'array',
|
||||
i18n: API_KEY_I18N.customPermissions.actions,
|
||||
title: 'Actions',
|
||||
items: {
|
||||
type: 'string',
|
||||
@@ -167,6 +192,7 @@ export class ApiKeyFormService {
|
||||
controlOptions: {
|
||||
inputType: 'text',
|
||||
},
|
||||
i18nKey: API_KEY_I18N.name,
|
||||
}),
|
||||
createLabeledControl({
|
||||
scope: '#/properties/description',
|
||||
@@ -177,6 +203,7 @@ export class ApiKeyFormService {
|
||||
multi: true,
|
||||
rows: 3,
|
||||
},
|
||||
i18nKey: API_KEY_I18N.description,
|
||||
}),
|
||||
// Permissions section header
|
||||
{
|
||||
@@ -185,6 +212,7 @@ export class ApiKeyFormService {
|
||||
options: {
|
||||
format: 'title',
|
||||
},
|
||||
i18n: API_KEY_I18N.permissions.header,
|
||||
} as LabelElement,
|
||||
{
|
||||
type: 'Label',
|
||||
@@ -192,6 +220,7 @@ export class ApiKeyFormService {
|
||||
options: {
|
||||
format: 'description',
|
||||
},
|
||||
i18n: API_KEY_I18N.permissions.description,
|
||||
} as LabelElement,
|
||||
// Roles selection
|
||||
createLabeledControl({
|
||||
@@ -210,6 +239,7 @@ export class ApiKeyFormService {
|
||||
),
|
||||
descriptions: this.getRoleDescriptions(),
|
||||
},
|
||||
i18nKey: API_KEY_I18N.roles,
|
||||
}),
|
||||
// Separator for permissions
|
||||
{
|
||||
@@ -218,6 +248,7 @@ export class ApiKeyFormService {
|
||||
options: {
|
||||
format: 'subtitle',
|
||||
},
|
||||
i18n: API_KEY_I18N.permissions.subheader,
|
||||
} as LabelElement,
|
||||
{
|
||||
type: 'Label',
|
||||
@@ -225,6 +256,7 @@ export class ApiKeyFormService {
|
||||
options: {
|
||||
format: 'description',
|
||||
},
|
||||
i18n: API_KEY_I18N.permissions.help,
|
||||
} as LabelElement,
|
||||
// Permission preset dropdown
|
||||
createLabeledControl({
|
||||
@@ -242,6 +274,7 @@ export class ApiKeyFormService {
|
||||
network_admin: 'Network Admin (Network & Services Control)',
|
||||
},
|
||||
},
|
||||
i18nKey: API_KEY_I18N.permissionPresets,
|
||||
}),
|
||||
// Custom permissions array - following OIDC pattern exactly
|
||||
{
|
||||
@@ -269,6 +302,7 @@ export class ApiKeyFormService {
|
||||
{}
|
||||
),
|
||||
},
|
||||
i18nKey: API_KEY_I18N.customPermissions.resources,
|
||||
}),
|
||||
createSimpleLabeledControl({
|
||||
scope: '#/properties/actions',
|
||||
@@ -278,6 +312,7 @@ export class ApiKeyFormService {
|
||||
multiple: true,
|
||||
labels: getAuthActionLabels(),
|
||||
},
|
||||
i18nKey: API_KEY_I18N.customPermissions.actions,
|
||||
}),
|
||||
],
|
||||
},
|
||||
|
||||
@@ -3,6 +3,7 @@ import { Test, TestingModule } from '@nestjs/testing';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
|
||||
import type { Mock } from 'vitest';
|
||||
import { plainToInstance } from 'class-transformer';
|
||||
import * as ini from 'ini';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
@@ -1182,4 +1183,58 @@ describe('CustomizationService - updateCfgFile', () => {
|
||||
writeError
|
||||
);
|
||||
});
|
||||
|
||||
describe('getTheme', () => {
|
||||
const mockDynamix = getters.dynamix as unknown as Mock;
|
||||
const baseDisplay = {
|
||||
theme: 'white',
|
||||
banner: '',
|
||||
showBannerGradient: 'no',
|
||||
background: '123456',
|
||||
headerdescription: 'yes',
|
||||
headermetacolor: '789abc',
|
||||
header: 'abcdef',
|
||||
};
|
||||
|
||||
const setDisplay = (overrides: Partial<typeof baseDisplay>) => {
|
||||
mockDynamix.mockReturnValue({
|
||||
display: {
|
||||
...baseDisplay,
|
||||
...overrides,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
it('reports showBannerImage when banner is "image"', async () => {
|
||||
setDisplay({ banner: 'image' });
|
||||
|
||||
const theme = await service.getTheme();
|
||||
|
||||
expect(theme.showBannerImage).toBe(true);
|
||||
});
|
||||
|
||||
it('reports showBannerImage when banner is "yes"', async () => {
|
||||
setDisplay({ banner: 'yes' });
|
||||
|
||||
const theme = await service.getTheme();
|
||||
|
||||
expect(theme.showBannerImage).toBe(true);
|
||||
});
|
||||
|
||||
it('disables showBannerImage when banner is empty', async () => {
|
||||
setDisplay({ banner: '' });
|
||||
|
||||
const theme = await service.getTheme();
|
||||
|
||||
expect(theme.showBannerImage).toBe(false);
|
||||
});
|
||||
|
||||
it('mirrors showBannerGradient flag from display settings', async () => {
|
||||
setDisplay({ banner: 'image', showBannerGradient: 'yes' });
|
||||
expect((await service.getTheme()).showBannerGradient).toBe(true);
|
||||
|
||||
setDisplay({ banner: 'image', showBannerGradient: 'no' });
|
||||
expect((await service.getTheme()).showBannerGradient).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -458,7 +458,7 @@ export class CustomizationService implements OnModuleInit {
|
||||
|
||||
return {
|
||||
name,
|
||||
showBannerImage: banner === 'yes',
|
||||
showBannerImage: banner === 'image' || banner === 'yes',
|
||||
showBannerGradient: bannerGradient === 'yes',
|
||||
headerBackgroundColor: this.addHashtoHexField(bgColor),
|
||||
headerPrimaryTextColor: this.addHashtoHexField(textColor),
|
||||
|
||||
@@ -0,0 +1,233 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { constants as fsConstants } from 'node:fs';
|
||||
import { access, readdir, readFile } from 'node:fs/promises';
|
||||
import { join } from 'path';
|
||||
|
||||
@Injectable()
|
||||
export class CpuTopologyService {
|
||||
private readonly logger = new Logger(CpuTopologyService.name);
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
// Read static CPU topology, per-package core thread pairs
|
||||
// -----------------------------------------------------------------
|
||||
async generateTopology(): Promise<number[][][]> {
|
||||
const packages: Record<number, number[][]> = {};
|
||||
let cpuDirs: string[];
|
||||
|
||||
try {
|
||||
cpuDirs = await readdir('/sys/devices/system/cpu');
|
||||
} catch (err) {
|
||||
this.logger.warn('CPU topology unavailable, /sys/devices/system/cpu not accessible');
|
||||
return [];
|
||||
}
|
||||
|
||||
for (const dir of cpuDirs) {
|
||||
if (!/^cpu\d+$/.test(dir)) continue;
|
||||
|
||||
const basePath = join('/sys/devices/system/cpu', dir, 'topology');
|
||||
const pkgFile = join(basePath, 'physical_package_id');
|
||||
const siblingsFile = join(basePath, 'thread_siblings_list');
|
||||
|
||||
try {
|
||||
const [pkgIdStr, siblingsStrRaw] = await Promise.all([
|
||||
readFile(pkgFile, 'utf8'),
|
||||
readFile(siblingsFile, 'utf8'),
|
||||
]);
|
||||
|
||||
const pkgId = parseInt(pkgIdStr.trim(), 10);
|
||||
|
||||
// expand ranges
|
||||
const siblings = siblingsStrRaw
|
||||
.trim()
|
||||
.replace(/(\d+)-(\d+)/g, (_, start, end) =>
|
||||
Array.from(
|
||||
{ length: parseInt(end) - parseInt(start) + 1 },
|
||||
(_, i) => parseInt(start) + i
|
||||
).join(',')
|
||||
)
|
||||
.split(',')
|
||||
.map((n) => parseInt(n, 10));
|
||||
|
||||
if (!packages[pkgId]) packages[pkgId] = [];
|
||||
if (!packages[pkgId].some((arr) => arr.join(',') === siblings.join(','))) {
|
||||
packages[pkgId].push(siblings);
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.warn(err, `Topology read error for ${dir}`);
|
||||
}
|
||||
}
|
||||
// Sort cores within each package, and packages by their lowest core index
|
||||
const result = Object.entries(packages)
|
||||
.sort((a, b) => a[1][0][0] - b[1][0][0]) // sort packages by first CPU ID
|
||||
.map(
|
||||
([pkgId, cores]) => cores.sort((a, b) => a[0] - b[0]) // sort cores within package
|
||||
);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
// Dynamic telemetry (power + temperature)
|
||||
// -----------------------------------------------------------------
|
||||
private async getPackageTemps(): Promise<number[]> {
|
||||
const temps: number[] = [];
|
||||
try {
|
||||
const hwmons = await readdir('/sys/class/hwmon');
|
||||
for (const hwmon of hwmons) {
|
||||
const path = join('/sys/class/hwmon', hwmon);
|
||||
try {
|
||||
const label = (await readFile(join(path, 'name'), 'utf8')).trim();
|
||||
if (/coretemp|k10temp|zenpower/i.test(label)) {
|
||||
const files = await readdir(path);
|
||||
for (const f of files) {
|
||||
if (f.startsWith('temp') && f.endsWith('_label')) {
|
||||
const lbl = (await readFile(join(path, f), 'utf8')).trim().toLowerCase();
|
||||
if (
|
||||
lbl.includes('package id') ||
|
||||
lbl.includes('tctl') ||
|
||||
lbl.includes('tdie')
|
||||
) {
|
||||
const inputFile = join(path, f.replace('_label', '_input'));
|
||||
try {
|
||||
const raw = await readFile(inputFile, 'utf8');
|
||||
const parsed = parseInt(raw.trim(), 10);
|
||||
if (Number.isFinite(parsed)) {
|
||||
temps.push(parsed / 1000);
|
||||
} else {
|
||||
this.logger.warn(`Invalid temperature value: ${raw.trim()}`);
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.warn('Failed to read file', err);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.warn('Failed to read file', err);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.warn('Failed to read file', err);
|
||||
}
|
||||
return temps;
|
||||
}
|
||||
|
||||
/**
 * Samples CPU package power draw (in watts) via the Linux powercap (RAPL)
 * sysfs interface by taking two energy-counter readings ~100 ms apart and
 * computing power = ΔE / Δt per package-level zone.
 *
 * @returns map of package id → { domainLabel: watts, total: watts };
 *          empty when powercap is unavailable or no zone is readable.
 */
private async getPackagePower(): Promise<Record<number, Record<string, number>>> {
    const basePath = '/sys/class/powercap';
    // Top-level RAPL control types for Intel (MSR and MMIO) and AMD.
    const prefixes = ['intel-rapl', 'intel-rapl-mmio', 'amd-rapl'];
    const raplPaths: string[] = [];

    try {
        const entries = await readdir(basePath, { withFileTypes: true });
        for (const entry of entries) {
            if (entry.isSymbolicLink() && prefixes.some((p) => entry.name.startsWith(p))) {
                // Skip sub-domains like "intel-rapl:0:0" (core/uncore/dram);
                // only package-level zones such as "intel-rapl:0" are sampled.
                if (/:\d+:\d+/.test(entry.name)) continue;
                raplPaths.push(join(basePath, entry.name));
            }
        }
    } catch {
        // powercap not present (old kernel, unsupported CPU, or no permission).
        return {};
    }

    if (!raplPaths.length) return {};

    // Reads the zone's monotonically increasing energy counter (microjoules);
    // returns null when the file is missing, unreadable, or non-numeric.
    const readEnergy = async (p: string): Promise<number | null> => {
        try {
            await access(join(p, 'energy_uj'), fsConstants.R_OK);
            const raw = await readFile(join(p, 'energy_uj'), 'utf8');
            const parsed = parseInt(raw.trim(), 10);
            return Number.isFinite(parsed) ? parsed : null;
        } catch {
            return null;
        }
    };

    // First sample: energy counter plus a high-resolution timestamp per zone.
    const prevE = new Map<string, number>();
    const prevT = new Map<string, bigint>();

    for (const p of raplPaths) {
        const val = await readEnergy(p);
        if (val !== null) {
            prevE.set(p, val);
            prevT.set(p, process.hrtime.bigint());
        }
    }

    // Sampling window between the two counter reads.
    await new Promise((res) => setTimeout(res, 100));

    const results: Record<number, Record<string, number>> = {};

    for (const p of raplPaths) {
        // Second sample per zone.
        const now = await readEnergy(p);
        if (now === null) continue;

        const prevVal = prevE.get(p);
        const prevTime = prevT.get(p);
        if (prevVal === undefined || prevTime === undefined) continue;

        const diffE = now - prevVal; // microjoules
        const diffT = Number(process.hrtime.bigint() - prevTime); // nanoseconds

        if (!Number.isFinite(diffE) || !Number.isFinite(diffT)) {
            this.logger.warn(`Non-finite energy/time diff for ${p}`);
            continue;
        }

        // diffE < 0 indicates the energy counter wrapped during the window;
        // skip the sample rather than report a bogus value.
        if (diffT <= 0 || diffE < 0) continue;

        // µJ → J and ns → s, yielding watts; rounded to 2 decimals.
        const watts = (diffE * 1e-6) / (diffT * 1e-9);
        const powerW = Math.round(watts * 100) / 100;

        if (!Number.isFinite(powerW)) {
            this.logger.warn(`Non-finite power value for ${p}: ${watts}`);
            continue;
        }

        // Zone label, e.g. "package-0"; fall back to a generic name if unreadable.
        const nameFile = join(p, 'name');
        let label = 'package';
        try {
            label = (await readFile(nameFile, 'utf8')).trim();
        } catch (err) {
            this.logger.warn('Failed to read file', err);
        }

        // Derive the package id from the label; zones without one go to package 0.
        const pkgMatch = label.match(/package-(\d+)/i);
        const pkgId = pkgMatch ? Number(pkgMatch[1]) : 0;

        if (!results[pkgId]) results[pkgId] = {};
        results[pkgId][label] = powerW;
    }

    // Add a synthetic "total" domain per package (sum of all sampled zones).
    for (const domains of Object.values(results)) {
        const total = Object.values(domains).reduce((a, b) => a + b, 0);
        domains['total'] = Math.round(total * 100) / 100;
    }

    return results;
}
|
||||
|
||||
async generateTelemetry(): Promise<{ id: number; power: number; temp: number }[]> {
|
||||
const temps = await this.getPackageTemps();
|
||||
const powerData = await this.getPackagePower();
|
||||
|
||||
const maxPkg = Math.max(temps.length - 1, ...Object.keys(powerData).map(Number), 0);
|
||||
|
||||
const result: {
|
||||
id: number;
|
||||
power: number;
|
||||
temp: number;
|
||||
}[] = [];
|
||||
|
||||
for (let pkgId = 0; pkgId <= maxPkg; pkgId++) {
|
||||
const entry = powerData[pkgId] ?? {};
|
||||
result.push({
|
||||
id: pkgId,
|
||||
power: entry.total ?? -1,
|
||||
temp: temps[pkgId] ?? -1,
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
@@ -39,6 +39,18 @@ export class CpuLoad {
|
||||
percentSteal!: number;
|
||||
}
|
||||
|
||||
/**
 * GraphQL object exposing per-CPU-package telemetry.
 *
 * The `power` and `temp` arrays are indexed by package id.
 * NOTE(review): a value of -1 is assumed to mark an unavailable reading,
 * per the service populating this type — confirm against the telemetry
 * generator before relying on it.
 */
@ObjectType({ implements: () => Node })
export class CpuPackages extends Node {
    // Sum of per-package power readings, in watts.
    @Field(() => Float, { description: 'Total CPU package power draw (W)' })
    totalPower!: number;

    // Power draw per package, in watts, indexed by package id.
    @Field(() => [Float], { description: 'Power draw per package (W)' })
    power!: number[];

    // Temperature per package, in °C, indexed by package id.
    @Field(() => [Float], { description: 'Temperature per package (°C)' })
    temp!: number[];
}
|
||||
|
||||
@ObjectType({ implements: () => Node })
|
||||
export class CpuUtilization extends Node {
|
||||
@Field(() => Float, { description: 'Total CPU load in percent' })
|
||||
@@ -100,4 +112,12 @@ export class InfoCpu extends Node {
|
||||
|
||||
@Field(() => [String], { nullable: true, description: 'CPU feature flags' })
|
||||
flags?: string[];
|
||||
|
||||
@Field(() => [[[Int]]], {
|
||||
description: 'Per-package array of core/thread pairs, e.g. [[[0,1],[2,3]], [[4,5],[6,7]]]',
|
||||
})
|
||||
topology!: number[][][];
|
||||
|
||||
@Field(() => CpuPackages)
|
||||
packages!: CpuPackages;
|
||||
}
|
||||
|
||||
10
api/src/unraid-api/graph/resolvers/info/cpu/cpu.module.ts
Normal file
10
api/src/unraid-api/graph/resolvers/info/cpu/cpu.module.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { Module } from '@nestjs/common';

import { CpuTopologyService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu-topology.service.js';
import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service.js';

/**
 * Bundles the CPU info providers (CpuService plus CpuTopologyService) and
 * exports both so other modules can inject them.
 */
@Module({
    providers: [CpuService, CpuTopologyService],
    exports: [CpuService, CpuTopologyService],
})
export class CpuModule {}
|
||||
@@ -1,5 +1,6 @@
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { CpuTopologyService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu-topology.service.js';
|
||||
import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service.js';
|
||||
|
||||
vi.mock('systeminformation', () => ({
|
||||
@@ -88,9 +89,27 @@ vi.mock('systeminformation', () => ({
|
||||
|
||||
describe('CpuService', () => {
|
||||
let service: CpuService;
|
||||
let cpuTopologyService: CpuTopologyService;
|
||||
|
||||
beforeEach(() => {
|
||||
service = new CpuService();
|
||||
cpuTopologyService = {
|
||||
generateTopology: vi.fn().mockResolvedValue([
|
||||
[
|
||||
[0, 1],
|
||||
[2, 3],
|
||||
],
|
||||
[
|
||||
[4, 5],
|
||||
[6, 7],
|
||||
],
|
||||
]),
|
||||
generateTelemetry: vi.fn().mockResolvedValue([
|
||||
{ power: 32.5, temp: 45.0 },
|
||||
{ power: 33.0, temp: 46.0 },
|
||||
]),
|
||||
} as unknown as CpuTopologyService;
|
||||
|
||||
service = new CpuService(cpuTopologyService);
|
||||
});
|
||||
|
||||
describe('generateCpu', () => {
|
||||
@@ -121,6 +140,22 @@ describe('CpuService', () => {
|
||||
l3: 12582912,
|
||||
},
|
||||
flags: ['fpu', 'vme', 'de', 'pse', 'tsc', 'msr', 'pae', 'mce', 'cx8'],
|
||||
packages: {
|
||||
id: 'info/cpu/packages',
|
||||
totalPower: 65.5,
|
||||
power: [32.5, 33.0],
|
||||
temp: [45.0, 46.0],
|
||||
},
|
||||
topology: [
|
||||
[
|
||||
[0, 1],
|
||||
[2, 3],
|
||||
],
|
||||
[
|
||||
[4, 5],
|
||||
[6, 7],
|
||||
],
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -2,25 +2,56 @@ import { Injectable } from '@nestjs/common';
|
||||
|
||||
import { cpu, cpuFlags, currentLoad } from 'systeminformation';
|
||||
|
||||
import { CpuUtilization, InfoCpu } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.model.js';
|
||||
import { CpuTopologyService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu-topology.service.js';
|
||||
import {
|
||||
CpuPackages,
|
||||
CpuUtilization,
|
||||
InfoCpu,
|
||||
} from '@app/unraid-api/graph/resolvers/info/cpu/cpu.model.js';
|
||||
|
||||
@Injectable()
|
||||
export class CpuService {
|
||||
constructor(private readonly cpuTopologyService: CpuTopologyService) {}
|
||||
|
||||
async generateCpu(): Promise<InfoCpu> {
|
||||
const { cores, physicalCores, speedMin, speedMax, stepping, ...rest } = await cpu();
|
||||
const { cores, physicalCores, speedMin, speedMax, stepping, processors, ...rest } = await cpu();
|
||||
const flags = await cpuFlags()
|
||||
.then((flags) => flags.split(' '))
|
||||
.then((f) => f.split(' '))
|
||||
.catch(() => []);
|
||||
|
||||
// Gather telemetry
|
||||
const packageList = await this.cpuTopologyService.generateTelemetry();
|
||||
const topology = await this.cpuTopologyService.generateTopology();
|
||||
|
||||
// Compute total power (2 decimals)
|
||||
const totalPower = Number(
|
||||
packageList
|
||||
.map((pkg) => pkg.power)
|
||||
.filter((power) => power >= 0)
|
||||
.reduce((sum, power) => sum + power, 0)
|
||||
.toFixed(2)
|
||||
);
|
||||
|
||||
// Build CpuPackages object
|
||||
const packages: CpuPackages = {
|
||||
id: 'info/cpu/packages',
|
||||
totalPower,
|
||||
power: packageList.map((pkg) => pkg.power ?? -1),
|
||||
temp: packageList.map((pkg) => pkg.temp ?? -1),
|
||||
};
|
||||
|
||||
return {
|
||||
id: 'info/cpu',
|
||||
...rest,
|
||||
cores: physicalCores,
|
||||
threads: cores,
|
||||
processors,
|
||||
flags,
|
||||
stepping: Number(stepping),
|
||||
speedmin: speedMin || -1,
|
||||
speedmax: speedMax || -1,
|
||||
packages,
|
||||
topology,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { ConfigModule } from '@nestjs/config';
|
||||
|
||||
import { CpuModule } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.module.js';
|
||||
import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service.js';
|
||||
import { DevicesResolver } from '@app/unraid-api/graph/resolvers/info/devices/devices.resolver.js';
|
||||
import { DevicesService } from '@app/unraid-api/graph/resolvers/info/devices/devices.service.js';
|
||||
@@ -14,7 +15,7 @@ import { VersionsService } from '@app/unraid-api/graph/resolvers/info/versions/v
|
||||
import { ServicesModule } from '@app/unraid-api/graph/services/services.module.js';
|
||||
|
||||
@Module({
|
||||
imports: [ConfigModule, ServicesModule],
|
||||
imports: [ConfigModule, ServicesModule, CpuModule],
|
||||
providers: [
|
||||
// Main resolver
|
||||
InfoResolver,
|
||||
@@ -25,7 +26,6 @@ import { ServicesModule } from '@app/unraid-api/graph/services/services.module.j
|
||||
CoreVersionsResolver,
|
||||
|
||||
// Services
|
||||
CpuService,
|
||||
MemoryService,
|
||||
DevicesService,
|
||||
OsService,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user