mirror of
https://github.com/unraid/api.git
synced 2026-01-02 14:40:01 -06:00
Compare commits
164 Commits
v4.23.0
...
refactor/r
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4ee5506266 | ||
|
|
86ec429079 | ||
|
|
bff05e0c5d | ||
|
|
18f3227d9e | ||
|
|
9504b4a8c5 | ||
|
|
b0ff85b2af | ||
|
|
1c4f33e4fb | ||
|
|
00f015f1b4 | ||
|
|
443335ed8e | ||
|
|
ad2a5cc443 | ||
|
|
9ed71e5df9 | ||
|
|
8ef00ce27f | ||
|
|
716789fa00 | ||
|
|
b456678fd8 | ||
|
|
902307ae55 | ||
|
|
2f88228937 | ||
|
|
5628bf9a0e | ||
|
|
3c9c04d684 | ||
|
|
a8578bf79b | ||
|
|
88e76d7fa2 | ||
|
|
1e5deccb70 | ||
|
|
4c49bafe15 | ||
|
|
17f0176857 | ||
|
|
4abc79a7b5 | ||
|
|
e817d6a317 | ||
|
|
c2d2fbea40 | ||
|
|
ad78a0251a | ||
|
|
c18de73b29 | ||
|
|
e61657c05b | ||
|
|
7969e44211 | ||
|
|
fd1a04641a | ||
|
|
d10e0540eb | ||
|
|
6458457a7f | ||
|
|
53ee465e92 | ||
|
|
4c42b4b810 | ||
|
|
4a19d4d536 | ||
|
|
ddd6b0b54f | ||
|
|
2122e6d8a2 | ||
|
|
e3bf57184b | ||
|
|
6a04a06a72 | ||
|
|
875aa0972a | ||
|
|
f7fe9597ef | ||
|
|
f690c23a20 | ||
|
|
fe74e53cf5 | ||
|
|
be1e2f5ee2 | ||
|
|
67ac30787f | ||
|
|
c21ecc7cb7 | ||
|
|
1000976296 | ||
|
|
9f27eb787f | ||
|
|
80bbf2dd6b | ||
|
|
4798c89a5f | ||
|
|
88766adeea | ||
|
|
e80ea795fe | ||
|
|
7679d71c3e | ||
|
|
a8459c7431 | ||
|
|
29312d78e6 | ||
|
|
a32dd2182a | ||
|
|
d07aa42063 | ||
|
|
9ef1cf1eca | ||
|
|
a0745e15ca | ||
|
|
c39b0b267c | ||
|
|
73135b8328 | ||
|
|
e42d619b6d | ||
|
|
560db880cc | ||
|
|
d6055f102b | ||
|
|
d099e7521d | ||
|
|
bb9b539732 | ||
|
|
0e44e73bf7 | ||
|
|
277ac42046 | ||
|
|
e1e3ea7eb6 | ||
|
|
8b155d1f1c | ||
|
|
d13a1f6174 | ||
|
|
e243ae836e | ||
|
|
01a63fd86b | ||
|
|
df78608457 | ||
|
|
ca3bee4ad5 | ||
|
|
024ae69343 | ||
|
|
99ce88bfdc | ||
|
|
73b2ce360c | ||
|
|
d6e29395c8 | ||
|
|
317e0fa307 | ||
|
|
331c913329 | ||
|
|
abf3461348 | ||
|
|
079a09ec90 | ||
|
|
e4223ab5a1 | ||
|
|
6f54206a4a | ||
|
|
e35bcc72f1 | ||
|
|
74df938e45 | ||
|
|
51f025b105 | ||
|
|
23a71207dd | ||
|
|
832e9d04f2 | ||
|
|
31af99e52f | ||
|
|
933cefa020 | ||
|
|
375dcd0598 | ||
|
|
64875edbba | ||
|
|
330e81a484 | ||
|
|
b8f0fdf8d2 | ||
|
|
36c104915e | ||
|
|
dc9a036c73 | ||
|
|
c71b0487ad | ||
|
|
e7340431a5 | ||
|
|
e4a9b8291b | ||
|
|
6b6b78fa2e | ||
|
|
e2fdf6cadb | ||
|
|
3d4f193fa4 | ||
|
|
b28ef1ea33 | ||
|
|
ee0f240233 | ||
|
|
3aacaa1fb5 | ||
|
|
0cd4c0ae16 | ||
|
|
66625ded6a | ||
|
|
f8a6785e9c | ||
|
|
d7aca81c60 | ||
|
|
854b403fbd | ||
|
|
c264a1843c | ||
|
|
45cda4af80 | ||
|
|
64eb9ce9b5 | ||
|
|
d56797c59f | ||
|
|
92af3b6115 | ||
|
|
35f8bc2258 | ||
|
|
c4cd0c6352 | ||
|
|
818e7ce997 | ||
|
|
7e13202aa1 | ||
|
|
d18eaf2364 | ||
|
|
42406e795d | ||
|
|
11d2de5d08 | ||
|
|
031c1ab5dc | ||
|
|
34075e44c5 | ||
|
|
ff2906e52a | ||
|
|
a0d6cc92c8 | ||
|
|
57acfaacf0 | ||
|
|
ea816c7a5c | ||
|
|
cafde72d38 | ||
|
|
2b481c397c | ||
|
|
8c4e9dd7ae | ||
|
|
f212dce88b | ||
|
|
8cd2a4c124 | ||
|
|
10f048ee1f | ||
|
|
e9e271ade5 | ||
|
|
31c41027fc | ||
|
|
fabe6a2c4b | ||
|
|
754966d5d3 | ||
|
|
ed594e9147 | ||
|
|
50d83313a1 | ||
|
|
e57ec00627 | ||
|
|
84f4a7221d | ||
|
|
d73953f8ff | ||
|
|
0d165a6087 | ||
|
|
f4f3e3c44b | ||
|
|
cd5eff11bc | ||
|
|
7bdeca8338 | ||
|
|
661865f976 | ||
|
|
b7afaf4632 | ||
|
|
b3ca40c639 | ||
|
|
378cdb7f10 | ||
|
|
d9c561bfeb | ||
|
|
9972a5f178 | ||
|
|
a44473c1d1 | ||
|
|
ed9a5c5ff9 | ||
|
|
d8b166e4b6 | ||
|
|
8b862ecef5 | ||
|
|
16913627de | ||
|
|
6b2f331941 | ||
|
|
8f02d96464 | ||
|
|
caff5a78ba |
@@ -241,4 +241,3 @@ const pinia = createTestingPinia({
|
||||
- Set initial state for focused testing
|
||||
- Test computed properties by accessing them directly
|
||||
- Verify state changes by updating the store
|
||||
|
||||
|
||||
208
.github/workflows/build-artifacts.yml
vendored
Normal file
208
.github/workflows/build-artifacts.yml
vendored
Normal file
@@ -0,0 +1,208 @@
|
||||
name: Build Artifacts
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ref:
|
||||
type: string
|
||||
required: false
|
||||
description: "Git ref to checkout (commit SHA, branch, or tag)"
|
||||
version_override:
|
||||
type: string
|
||||
required: false
|
||||
description: "Override version (for manual releases)"
|
||||
outputs:
|
||||
build_number:
|
||||
description: "Build number for the artifacts"
|
||||
value: ${{ jobs.build-api.outputs.build_number }}
|
||||
secrets:
|
||||
VITE_ACCOUNT:
|
||||
required: true
|
||||
VITE_CONNECT:
|
||||
required: true
|
||||
VITE_UNRAID_NET:
|
||||
required: true
|
||||
VITE_CALLBACK_KEY:
|
||||
required: true
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN:
|
||||
required: false
|
||||
|
||||
jobs:
|
||||
build-api:
|
||||
name: Build API
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
build_number: ${{ steps.buildnumber.outputs.build_number || steps.fallback_buildnumber.outputs.build_number }}
|
||||
defaults:
|
||||
run:
|
||||
working-directory: api
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: ${{ inputs.ref || github.ref }}
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential
|
||||
version: 1.0
|
||||
|
||||
- name: PNPM Install
|
||||
run: |
|
||||
cd ${{ github.workspace }}
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Get Git Short Sha and API version
|
||||
id: vars
|
||||
run: |
|
||||
GIT_SHA=$(git rev-parse --short HEAD)
|
||||
IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
|
||||
PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
|
||||
API_VERSION=${{ inputs.version_override && format('"{0}"', inputs.version_override) || '${PACKAGE_LOCK_VERSION}' }}
|
||||
if [ -z "${{ inputs.version_override }}" ] && [ -z "$IS_TAGGED" ]; then
|
||||
API_VERSION="${PACKAGE_LOCK_VERSION}+${GIT_SHA}"
|
||||
fi
|
||||
export API_VERSION
|
||||
echo "API_VERSION=${API_VERSION}" >> $GITHUB_ENV
|
||||
echo "PACKAGE_LOCK_VERSION=${PACKAGE_LOCK_VERSION}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Generate build number
|
||||
id: buildnumber
|
||||
if: github.repository == 'unraid/api'
|
||||
continue-on-error: true
|
||||
uses: onyxmueller/build-tag-number@v1
|
||||
with:
|
||||
token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN || github.token }}
|
||||
prefix: ${{ inputs.version_override || steps.vars.outputs.PACKAGE_LOCK_VERSION }}
|
||||
|
||||
- name: Generate fallback build number
|
||||
id: fallback_buildnumber
|
||||
if: steps.buildnumber.outcome != 'success'
|
||||
run: echo "build_number=${GITHUB_RUN_NUMBER}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
pnpm run build:release
|
||||
tar -czf deploy/unraid-api.tgz -C deploy/pack/ .
|
||||
|
||||
- name: Upload tgz to Github artifacts
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: unraid-api
|
||||
path: ${{ github.workspace }}/api/deploy/unraid-api.tgz
|
||||
|
||||
build-unraid-ui-webcomponents:
|
||||
name: Build Unraid UI Library (Webcomponent Version)
|
||||
defaults:
|
||||
run:
|
||||
working-directory: unraid-ui
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: ${{ inputs.ref || github.ref }}
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential
|
||||
version: 1.0
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
cd ${{ github.workspace }}
|
||||
pnpm install --frozen-lockfile --filter @unraid/ui
|
||||
|
||||
- name: Lint
|
||||
run: pnpm run lint
|
||||
|
||||
- name: Build
|
||||
run: pnpm run build:wc
|
||||
|
||||
- name: Upload Artifact to Github
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: unraid-wc-ui
|
||||
path: unraid-ui/dist-wc/
|
||||
|
||||
build-web:
|
||||
name: Build Web App
|
||||
defaults:
|
||||
run:
|
||||
working-directory: web
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: ${{ inputs.ref || github.ref }}
|
||||
|
||||
- name: Create env file
|
||||
run: |
|
||||
touch .env
|
||||
echo VITE_ACCOUNT=${{ secrets.VITE_ACCOUNT }} >> .env
|
||||
echo VITE_CONNECT=${{ secrets.VITE_CONNECT }} >> .env
|
||||
echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
|
||||
echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: PNPM Install
|
||||
run: |
|
||||
cd ${{ github.workspace }}
|
||||
pnpm install --frozen-lockfile --filter @unraid/web --filter @unraid/ui
|
||||
|
||||
- name: Build Unraid UI
|
||||
run: |
|
||||
cd ${{ github.workspace }}/unraid-ui
|
||||
pnpm run build
|
||||
|
||||
- name: Lint files
|
||||
run: pnpm run lint
|
||||
|
||||
- name: Type Check
|
||||
run: pnpm run type-check
|
||||
|
||||
- name: Build
|
||||
run: pnpm run build
|
||||
|
||||
- name: Upload build to Github artifacts
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: unraid-wc-rich
|
||||
path: web/dist
|
||||
|
||||
40
.github/workflows/build-plugin.yml
vendored
40
.github/workflows/build-plugin.yml
vendored
@@ -27,6 +27,15 @@ on:
|
||||
type: string
|
||||
required: true
|
||||
description: "Build number for the plugin builds"
|
||||
ref:
|
||||
type: string
|
||||
required: false
|
||||
description: "Git ref (commit SHA, branch, or tag) to checkout"
|
||||
TRIGGER_PRODUCTION_RELEASE:
|
||||
type: boolean
|
||||
required: false
|
||||
default: false
|
||||
description: "Whether to automatically trigger the release-production workflow (default: false)"
|
||||
secrets:
|
||||
CF_ACCESS_KEY_ID:
|
||||
required: true
|
||||
@@ -47,8 +56,9 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: ${{ inputs.ref }}
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
@@ -57,7 +67,7 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
@@ -68,7 +78,21 @@ jobs:
|
||||
GIT_SHA=$(git rev-parse --short HEAD)
|
||||
IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
|
||||
PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
|
||||
API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
|
||||
|
||||
# For release builds, trust the release tag version to avoid stale checkouts
|
||||
if [ "${{ inputs.RELEASE_CREATED }}" = "true" ] && [ -n "${{ inputs.RELEASE_TAG }}" ]; then
|
||||
TAG_VERSION="${{ inputs.RELEASE_TAG }}"
|
||||
TAG_VERSION="${TAG_VERSION#v}" # trim leading v if present
|
||||
|
||||
if [ "$TAG_VERSION" != "$PACKAGE_LOCK_VERSION" ]; then
|
||||
echo "::warning::Release tag version ($TAG_VERSION) does not match package.json version ($PACKAGE_LOCK_VERSION). Using tag version for TXZ naming."
|
||||
fi
|
||||
|
||||
API_VERSION="$TAG_VERSION"
|
||||
else
|
||||
API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
|
||||
fi
|
||||
|
||||
echo "API_VERSION=${API_VERSION}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Install dependencies
|
||||
@@ -77,19 +101,19 @@ jobs:
|
||||
pnpm install --frozen-lockfile --filter @unraid/connect-plugin
|
||||
|
||||
- name: Download Unraid UI Components
|
||||
uses: actions/download-artifact@v5
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: unraid-wc-ui
|
||||
path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/uui
|
||||
merge-multiple: true
|
||||
- name: Download Unraid Web Components
|
||||
uses: actions/download-artifact@v5
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
pattern: unraid-wc-rich
|
||||
path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/standalone
|
||||
merge-multiple: true
|
||||
- name: Download Unraid API
|
||||
uses: actions/download-artifact@v5
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: unraid-api
|
||||
path: ${{ github.workspace }}/plugin/api/
|
||||
@@ -118,7 +142,7 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Upload to GHA
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: unraid-plugin-${{ github.run_id }}-${{ inputs.RELEASE_TAG }}
|
||||
path: plugin/deploy/
|
||||
@@ -136,7 +160,7 @@ jobs:
|
||||
done
|
||||
|
||||
- name: Workflow Dispatch and wait
|
||||
if: inputs.RELEASE_CREATED == 'true'
|
||||
if: inputs.RELEASE_CREATED == 'true' && inputs.TRIGGER_PRODUCTION_RELEASE == true
|
||||
uses: the-actions-org/workflow-dispatch@v4.0.0
|
||||
with:
|
||||
workflow: release-production.yml
|
||||
|
||||
103
.github/workflows/claude-code-review.yml
vendored
103
.github/workflows/claude-code-review.yml
vendored
@@ -1,103 +0,0 @@
|
||||
name: Claude Code Review
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
# Skip reviews for non-code changes
|
||||
paths-ignore:
|
||||
- "**/*.md"
|
||||
- "**/package-lock.json"
|
||||
- "**/pnpm-lock.yaml"
|
||||
- "**/.gitignore"
|
||||
- "**/LICENSE"
|
||||
- "**/*.config.js"
|
||||
- "**/*.config.ts"
|
||||
- "**/tsconfig.json"
|
||||
- "**/.github/workflows/*.yml"
|
||||
- "**/docs/**"
|
||||
|
||||
jobs:
|
||||
claude-review:
|
||||
# Skip review for bot PRs and WIP/skip-review PRs
|
||||
# Only run if changes are significant (>10 lines)
|
||||
if: |
|
||||
(github.event.pull_request.additions > 10 || github.event.pull_request.deletions > 10) &&
|
||||
!contains(github.event.pull_request.title, '[skip-review]') &&
|
||||
!contains(github.event.pull_request.title, '[WIP]') &&
|
||||
!endsWith(github.event.pull_request.user.login, '[bot]') &&
|
||||
github.event.pull_request.user.login != 'dependabot' &&
|
||||
github.event.pull_request.user.login != 'renovate'
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
issues: read
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Run Claude Code Review
|
||||
id: claude-review
|
||||
uses: anthropics/claude-code-action@beta
|
||||
with:
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
|
||||
# Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4)
|
||||
# model: "claude-opus-4-20250514"
|
||||
|
||||
# Direct prompt for automated review (no @claude mention needed)
|
||||
direct_prompt: |
|
||||
IMPORTANT: Review ONLY the DIFF/CHANGESET - the actual lines that were added or modified in this PR.
|
||||
DO NOT review the entire file context, only analyze the specific changes being made.
|
||||
|
||||
Look for HIGH-PRIORITY issues in the CHANGED LINES ONLY:
|
||||
|
||||
1. CRITICAL BUGS: Logic errors, null pointer issues, infinite loops, race conditions
|
||||
2. SECURITY: SQL injection, XSS, authentication bypass, exposed secrets, unsafe operations
|
||||
3. BREAKING CHANGES: API contract violations, removed exports, changed function signatures
|
||||
4. DATA LOSS RISKS: Destructive operations without safeguards, missing data validation
|
||||
|
||||
DO NOT comment on:
|
||||
- Code that wasn't changed in this PR
|
||||
- Style, formatting, or documentation
|
||||
- Test coverage (unless tests are broken by the changes)
|
||||
- Minor optimizations or best practices
|
||||
- Existing code issues that weren't introduced by this PR
|
||||
|
||||
If you find no critical issues in the DIFF, respond with: "✅ No critical issues found in changes"
|
||||
|
||||
Keep response under 10 lines. Reference specific line numbers from the diff when reporting issues.
|
||||
|
||||
# Optional: Use sticky comments to make Claude reuse the same comment on subsequent pushes to the same PR
|
||||
use_sticky_comment: true
|
||||
|
||||
# Context-aware review based on PR characteristics
|
||||
# Uncomment to enable different review strategies based on context
|
||||
# direct_prompt: |
|
||||
# ${{
|
||||
# (github.event.pull_request.additions > 500) &&
|
||||
# 'Large PR detected. Focus only on architectural issues and breaking changes. Skip minor issues.' ||
|
||||
# contains(github.event.pull_request.title, 'fix') &&
|
||||
# 'Bug fix PR: Verify the fix addresses the root cause and check for regression risks.' ||
|
||||
# contains(github.event.pull_request.title, 'deps') &&
|
||||
# 'Dependency update: Check for breaking changes and security advisories only.' ||
|
||||
# contains(github.event.pull_request.title, 'refactor') &&
|
||||
# 'Refactor PR: Verify no behavior changes and check for performance regressions.' ||
|
||||
# contains(github.event.pull_request.title, 'feat') &&
|
||||
# 'New feature: Check for security issues, edge cases, and integration problems only.' ||
|
||||
# 'Standard review: Check for critical bugs, security issues, and breaking changes only.'
|
||||
# }}
|
||||
|
||||
# Optional: Add specific tools for running tests or linting
|
||||
# allowed_tools: "Bash(npm run test),Bash(npm run lint),Bash(npm run typecheck)"
|
||||
|
||||
# Optional: Skip review for certain conditions
|
||||
# if: |
|
||||
# !contains(github.event.pull_request.title, '[skip-review]') &&
|
||||
# !contains(github.event.pull_request.title, '[WIP]')
|
||||
|
||||
64
.github/workflows/claude.yml
vendored
64
.github/workflows/claude.yml
vendored
@@ -1,64 +0,0 @@
|
||||
name: Claude Code
|
||||
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
pull_request_review_comment:
|
||||
types: [created]
|
||||
issues:
|
||||
types: [opened, assigned]
|
||||
pull_request_review:
|
||||
types: [submitted]
|
||||
|
||||
jobs:
|
||||
claude:
|
||||
if: |
|
||||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
|
||||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
issues: read
|
||||
id-token: write
|
||||
actions: read # Required for Claude to read CI results on PRs
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Run Claude Code
|
||||
id: claude
|
||||
uses: anthropics/claude-code-action@beta
|
||||
with:
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
|
||||
# This is an optional setting that allows Claude to read CI results on PRs
|
||||
additional_permissions: |
|
||||
actions: read
|
||||
|
||||
# Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4)
|
||||
# model: "claude-opus-4-20250514"
|
||||
|
||||
# Optional: Customize the trigger phrase (default: @claude)
|
||||
# trigger_phrase: "/claude"
|
||||
|
||||
# Optional: Trigger when specific user is assigned to an issue
|
||||
# assignee_trigger: "claude-bot"
|
||||
|
||||
# Optional: Allow Claude to run specific commands
|
||||
# allowed_tools: "Bash(npm install),Bash(npm run build),Bash(npm run test:*),Bash(npm run lint:*)"
|
||||
|
||||
# Optional: Add custom instructions for Claude to customize its behavior for your project
|
||||
# custom_instructions: |
|
||||
# Follow our coding standards
|
||||
# Ensure all new code has tests
|
||||
# Use TypeScript for new files
|
||||
|
||||
# Optional: Custom environment variables for Claude
|
||||
# claude_env: |
|
||||
# NODE_ENV: test
|
||||
|
||||
8
.github/workflows/codeql-analysis.yml
vendored
8
.github/workflows/codeql-analysis.yml
vendored
@@ -24,17 +24,17 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
uses: github/codeql-action/init@v4
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/codeql-config.yml
|
||||
queries: +security-and-quality
|
||||
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v3
|
||||
uses: github/codeql-action/autobuild@v4
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
uses: github/codeql-action/analyze@v4
|
||||
82
.github/workflows/create-docusaurus-pr.yml
vendored
82
.github/workflows/create-docusaurus-pr.yml
vendored
@@ -1,82 +0,0 @@
|
||||
name: Update API Documentation
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- 'api/docs/**'
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
# Add permissions for GITHUB_TOKEN
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
jobs:
|
||||
create-docs-pr:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout source repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
path: source-repo
|
||||
|
||||
- name: Checkout docs repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
repository: unraid/docs
|
||||
path: docs-repo
|
||||
token: ${{ secrets.DOCS_PAT_UNRAID_BOT }}
|
||||
|
||||
- name: Copy and process docs
|
||||
run: |
|
||||
if [ ! -d "source-repo/api/docs" ]; then
|
||||
echo "Source directory does not exist!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Remove old API docs but preserve other folders
|
||||
rm -rf docs-repo/docs/API/
|
||||
mkdir -p docs-repo/docs/API
|
||||
|
||||
# Copy all markdown files and maintain directory structure
|
||||
cp -r source-repo/api/docs/public/. docs-repo/docs/API/
|
||||
|
||||
# Copy images to Docusaurus static directory
|
||||
mkdir -p docs-repo/static/img/api
|
||||
|
||||
# Copy images from public/images if they exist
|
||||
if [ -d "source-repo/api/docs/public/images" ]; then
|
||||
cp -r source-repo/api/docs/public/images/. docs-repo/static/img/api/
|
||||
fi
|
||||
|
||||
# Also copy any images from the parent docs/images directory
|
||||
if [ -d "source-repo/api/docs/images" ]; then
|
||||
cp -r source-repo/api/docs/images/. docs-repo/static/img/api/
|
||||
fi
|
||||
|
||||
# Update image paths in markdown files
|
||||
# Replace relative image paths with absolute paths pointing to /img/api/
|
||||
find docs-repo/docs/API -name "*.md" -type f -exec sed -i 's|!\[\([^]]*\)\](\./images/\([^)]*\))||g' {} \;
|
||||
find docs-repo/docs/API -name "*.md" -type f -exec sed -i 's|!\[\([^]]*\)\](images/\([^)]*\))||g' {} \;
|
||||
find docs-repo/docs/API -name "*.md" -type f -exec sed -i 's|!\[\([^]]*\)\](../images/\([^)]*\))||g' {} \;
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.DOCS_PAT_UNRAID_BOT }}
|
||||
path: docs-repo
|
||||
commit-message: 'docs: update API documentation'
|
||||
title: 'Update API Documentation'
|
||||
body: |
|
||||
This PR updates the API documentation based on changes from the main repository.
|
||||
|
||||
Changes were automatically generated from api/docs/* directory.
|
||||
|
||||
@coderabbitai ignore
|
||||
reviewers: ljm42, elibosley
|
||||
branch: update-api-docs
|
||||
base: main
|
||||
delete-branch: true
|
||||
4
.github/workflows/deploy-storybook.yml
vendored
4
.github/workflows/deploy-storybook.yml
vendored
@@ -20,7 +20,7 @@ jobs:
|
||||
name: Deploy Storybook
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
@@ -28,7 +28,7 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
210
.github/workflows/generate-release-notes.yml
vendored
Normal file
210
.github/workflows/generate-release-notes.yml
vendored
Normal file
@@ -0,0 +1,210 @@
|
||||
name: Generate Release Notes
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
version:
|
||||
description: 'Version number (e.g., 4.25.3)'
|
||||
required: true
|
||||
type: string
|
||||
target_commitish:
|
||||
description: 'Commit SHA or branch (leave empty for current HEAD)'
|
||||
required: false
|
||||
type: string
|
||||
release_notes:
|
||||
description: 'Custom release notes (leave empty to auto-generate)'
|
||||
required: false
|
||||
type: string
|
||||
outputs:
|
||||
release_notes:
|
||||
description: 'Generated or provided release notes'
|
||||
value: ${{ jobs.generate.outputs.release_notes }}
|
||||
secrets:
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
generate:
|
||||
name: Generate Release Notes
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
release_notes: ${{ steps.generate_notes.outputs.release_notes }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: ${{ inputs.target_commitish || github.ref }}
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Generate Release Notes
|
||||
id: generate_notes
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
TAG_NAME="v${{ inputs.version }}"
|
||||
VERSION="${{ inputs.version }}"
|
||||
|
||||
if [ -n "${{ inputs.release_notes }}" ]; then
|
||||
NOTES="${{ inputs.release_notes }}"
|
||||
else
|
||||
CHANGELOG_PATH="api/CHANGELOG.md"
|
||||
|
||||
if [ -f "$CHANGELOG_PATH" ]; then
|
||||
echo "Extracting release notes from CHANGELOG.md for version ${VERSION}"
|
||||
|
||||
NOTES=$(awk -v ver="$VERSION" '
|
||||
BEGIN {
|
||||
found=0; capture=0; output="";
|
||||
gsub(/\./, "\\.", ver);
|
||||
}
|
||||
/^## \[/ {
|
||||
if (capture) exit;
|
||||
if ($0 ~ "\\[" ver "\\]") {
|
||||
found=1;
|
||||
capture=1;
|
||||
}
|
||||
}
|
||||
capture {
|
||||
if (output != "") output = output "\n";
|
||||
output = output $0;
|
||||
}
|
||||
END {
|
||||
if (found) print output;
|
||||
else exit 1;
|
||||
}
|
||||
' "$CHANGELOG_PATH") || EXTRACTION_STATUS=$?
|
||||
|
||||
if [ ${EXTRACTION_STATUS:-0} -eq 0 ] && [ -n "$NOTES" ]; then
|
||||
echo "✓ Found release notes in CHANGELOG.md"
|
||||
else
|
||||
echo "⚠ Version ${VERSION} not found in CHANGELOG.md, generating with conventional-changelog"
|
||||
|
||||
PREV_TAG=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")
|
||||
CHANGELOG_GENERATED=false
|
||||
|
||||
if [ -n "$PREV_TAG" ]; then
|
||||
echo "Generating changelog from ${PREV_TAG}..HEAD using conventional-changelog"
|
||||
|
||||
npm install -g conventional-changelog-cli
|
||||
|
||||
TEMP_NOTES=$(mktemp)
|
||||
conventional-changelog -p conventionalcommits \
|
||||
--release-count 1 \
|
||||
--output-unreleased \
|
||||
> "$TEMP_NOTES" 2>/dev/null || true
|
||||
|
||||
if [ -s "$TEMP_NOTES" ]; then
|
||||
NOTES=$(cat "$TEMP_NOTES")
|
||||
|
||||
if [ -n "$NOTES" ]; then
|
||||
echo "✓ Generated changelog with conventional-changelog"
|
||||
CHANGELOG_GENERATED=true
|
||||
|
||||
TEMP_CHANGELOG=$(mktemp)
|
||||
{
|
||||
if [ -f "$CHANGELOG_PATH" ]; then
|
||||
head -n 1 "$CHANGELOG_PATH"
|
||||
echo ""
|
||||
echo "$NOTES"
|
||||
echo ""
|
||||
tail -n +2 "$CHANGELOG_PATH"
|
||||
else
|
||||
echo "# Changelog"
|
||||
echo ""
|
||||
echo "$NOTES"
|
||||
fi
|
||||
} > "$TEMP_CHANGELOG"
|
||||
|
||||
mv "$TEMP_CHANGELOG" "$CHANGELOG_PATH"
|
||||
echo "✓ Updated CHANGELOG.md with generated notes"
|
||||
else
|
||||
echo "⚠ conventional-changelog produced empty output, using GitHub auto-generation"
|
||||
NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
|
||||
-f tag_name="${TAG_NAME}" \
|
||||
-f target_commitish="${{ inputs.target_commitish || github.sha }}" \
|
||||
-f previous_tag_name="${PREV_TAG}" \
|
||||
--jq '.body')
|
||||
fi
|
||||
else
|
||||
echo "⚠ conventional-changelog failed, using GitHub auto-generation"
|
||||
NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
|
||||
-f tag_name="${TAG_NAME}" \
|
||||
-f target_commitish="${{ inputs.target_commitish || github.sha }}" \
|
||||
-f previous_tag_name="${PREV_TAG}" \
|
||||
--jq '.body')
|
||||
fi
|
||||
|
||||
rm -f "$TEMP_NOTES"
|
||||
else
|
||||
echo "⚠ No previous tag found, using GitHub auto-generation"
|
||||
NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
|
||||
-f tag_name="${TAG_NAME}" \
|
||||
-f target_commitish="${{ inputs.target_commitish || github.sha }}" \
|
||||
--jq '.body' || echo "Release ${VERSION}")
|
||||
fi
|
||||
|
||||
if [ "$CHANGELOG_GENERATED" = true ]; then
|
||||
BRANCH_OR_SHA="${{ inputs.target_commitish || github.ref }}"
|
||||
|
||||
if git show-ref --verify --quiet "refs/heads/${BRANCH_OR_SHA}"; then
|
||||
echo ""
|
||||
echo "=========================================="
|
||||
echo "CHANGELOG GENERATED AND COMMITTED"
|
||||
echo "=========================================="
|
||||
echo ""
|
||||
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
BEFORE_SHA=$(git rev-parse HEAD)
|
||||
|
||||
git add "$CHANGELOG_PATH"
|
||||
git commit -m "chore: add changelog for version ${VERSION}"
|
||||
git push origin "HEAD:${BRANCH_OR_SHA}"
|
||||
|
||||
AFTER_SHA=$(git rev-parse HEAD)
|
||||
|
||||
echo "✓ Changelog committed and pushed successfully"
|
||||
echo ""
|
||||
echo "Previous SHA: ${BEFORE_SHA}"
|
||||
echo "New SHA: ${AFTER_SHA}"
|
||||
echo ""
|
||||
echo "⚠️ CRITICAL: A new commit was created, but github.sha is immutable."
|
||||
echo "⚠️ github.sha = ${BEFORE_SHA} (original workflow trigger)"
|
||||
echo "⚠️ The release tag must point to ${AFTER_SHA} (with changelog)"
|
||||
echo ""
|
||||
echo "Re-run this workflow to create the release with the correct commit."
|
||||
echo ""
|
||||
exit 1
|
||||
else
|
||||
echo "⚠ Target is a commit SHA, not a branch. Cannot push changelog updates."
|
||||
echo "Changelog was generated but not committed."
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo "⚠ CHANGELOG.md not found, using GitHub auto-generation"
|
||||
PREV_TAG=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")
|
||||
|
||||
if [ -n "$PREV_TAG" ]; then
|
||||
NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
|
||||
-f tag_name="${TAG_NAME}" \
|
||||
-f target_commitish="${{ inputs.target_commitish || github.sha }}" \
|
||||
-f previous_tag_name="${PREV_TAG}" \
|
||||
--jq '.body')
|
||||
else
|
||||
NOTES="Release ${VERSION}"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "release_notes<<EOF" >> $GITHUB_OUTPUT
|
||||
echo "$NOTES" >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
|
||||
203
.github/workflows/main.yml
vendored
203
.github/workflows/main.yml
vendored
@@ -23,7 +23,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -33,7 +33,7 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
@@ -154,173 +154,15 @@ jobs:
|
||||
files: ./coverage/coverage-final.json,../web/coverage/coverage-final.json,../unraid-ui/coverage/coverage-final.json,../packages/unraid-api-plugin-connect/coverage/coverage-final.json,../packages/unraid-shared/coverage/coverage-final.json
|
||||
fail_ci_if_error: false
|
||||
|
||||
build-api:
|
||||
name: Build API
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
build_number: ${{ steps.buildnumber.outputs.build_number }}
|
||||
defaults:
|
||||
run:
|
||||
working-directory: api
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential
|
||||
version: 1.0
|
||||
|
||||
- name: PNPM Install
|
||||
run: |
|
||||
cd ${{ github.workspace }}
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build
|
||||
run: pnpm run build
|
||||
|
||||
- name: Get Git Short Sha and API version
|
||||
id: vars
|
||||
run: |
|
||||
GIT_SHA=$(git rev-parse --short HEAD)
|
||||
IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
|
||||
PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
|
||||
API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
|
||||
export API_VERSION
|
||||
echo "API_VERSION=${API_VERSION}" >> $GITHUB_ENV
|
||||
echo "PACKAGE_LOCK_VERSION=${PACKAGE_LOCK_VERSION}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Generate build number
|
||||
id: buildnumber
|
||||
uses: onyxmueller/build-tag-number@v1
|
||||
with:
|
||||
token: ${{secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN}}
|
||||
prefix: ${{steps.vars.outputs.PACKAGE_LOCK_VERSION}}
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
pnpm run build:release
|
||||
tar -czf deploy/unraid-api.tgz -C deploy/pack/ .
|
||||
|
||||
- name: Upload tgz to Github artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: unraid-api
|
||||
path: ${{ github.workspace }}/api/deploy/unraid-api.tgz
|
||||
|
||||
build-unraid-ui-webcomponents:
|
||||
name: Build Unraid UI Library (Webcomponent Version)
|
||||
defaults:
|
||||
run:
|
||||
working-directory: unraid-ui
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential
|
||||
version: 1.0
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
cd ${{ github.workspace }}
|
||||
pnpm install --frozen-lockfile --filter @unraid/ui
|
||||
|
||||
- name: Lint
|
||||
run: pnpm run lint
|
||||
|
||||
- name: Build
|
||||
run: pnpm run build:wc
|
||||
|
||||
- name: Upload Artifact to Github
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: unraid-wc-ui
|
||||
path: unraid-ui/dist-wc/
|
||||
|
||||
build-web:
|
||||
# needs: [build-unraid-ui]
|
||||
name: Build Web App
|
||||
defaults:
|
||||
run:
|
||||
working-directory: web
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Create env file
|
||||
run: |
|
||||
touch .env
|
||||
echo VITE_ACCOUNT=${{ secrets.VITE_ACCOUNT }} >> .env
|
||||
echo VITE_CONNECT=${{ secrets.VITE_CONNECT }} >> .env
|
||||
echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
|
||||
echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: 'pnpm'
|
||||
|
||||
- name: PNPM Install
|
||||
run: |
|
||||
cd ${{ github.workspace }}
|
||||
pnpm install --frozen-lockfile --filter @unraid/web --filter @unraid/ui
|
||||
|
||||
- name: Build Unraid UI
|
||||
run: |
|
||||
cd ${{ github.workspace }}/unraid-ui
|
||||
pnpm run build
|
||||
|
||||
- name: Lint files
|
||||
run: pnpm run lint
|
||||
|
||||
- name: Type Check
|
||||
run: pnpm run type-check
|
||||
|
||||
- name: Test
|
||||
run: pnpm run test:ci
|
||||
|
||||
- name: Build
|
||||
run: pnpm run build
|
||||
|
||||
- name: Upload build to Github artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: unraid-wc-rich
|
||||
path: web/dist
|
||||
build-artifacts:
|
||||
name: Build All Artifacts
|
||||
uses: ./.github/workflows/build-artifacts.yml
|
||||
secrets:
|
||||
VITE_ACCOUNT: ${{ secrets.VITE_ACCOUNT }}
|
||||
VITE_CONNECT: ${{ secrets.VITE_CONNECT }}
|
||||
VITE_UNRAID_NET: ${{ secrets.VITE_UNRAID_NET }}
|
||||
VITE_CALLBACK_KEY: ${{ secrets.VITE_CALLBACK_KEY }}
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
release-please:
|
||||
name: Release Please
|
||||
@@ -329,15 +171,15 @@ jobs:
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||
needs:
|
||||
- test-api
|
||||
- build-api
|
||||
- build-web
|
||||
- build-unraid-ui-webcomponents
|
||||
- build-artifacts
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- id: release
|
||||
uses: googleapis/release-please-action@v4
|
||||
@@ -348,17 +190,15 @@ jobs:
|
||||
build-plugin-staging-pr:
|
||||
name: Build and Deploy Plugin
|
||||
needs:
|
||||
- build-api
|
||||
- build-web
|
||||
- build-unraid-ui-webcomponents
|
||||
- build-artifacts
|
||||
- test-api
|
||||
uses: ./.github/workflows/build-plugin.yml
|
||||
with:
|
||||
RELEASE_CREATED: false
|
||||
RELEASE_CREATED: 'false'
|
||||
TAG: ${{ github.event.pull_request.number && format('PR{0}', github.event.pull_request.number) || '' }}
|
||||
BUCKET_PATH: ${{ github.event.pull_request.number && format('unraid-api/tag/PR{0}', github.event.pull_request.number) || 'unraid-api' }}
|
||||
BASE_URL: "https://preview.dl.unraid.net/unraid-api"
|
||||
BUILD_NUMBER: ${{ needs.build-api.outputs.build_number }}
|
||||
BUILD_NUMBER: ${{ needs.build-artifacts.outputs.build_number }}
|
||||
secrets:
|
||||
CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
|
||||
CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
@@ -370,15 +210,16 @@ jobs:
|
||||
name: Build and Deploy Production Plugin
|
||||
needs:
|
||||
- release-please
|
||||
- build-api
|
||||
- build-artifacts
|
||||
uses: ./.github/workflows/build-plugin.yml
|
||||
with:
|
||||
RELEASE_CREATED: true
|
||||
RELEASE_CREATED: 'true'
|
||||
RELEASE_TAG: ${{ needs.release-please.outputs.tag_name }}
|
||||
TAG: ""
|
||||
BUCKET_PATH: unraid-api
|
||||
BASE_URL: "https://stable.dl.unraid.net/unraid-api"
|
||||
BUILD_NUMBER: ${{ needs.build-api.outputs.build_number }}
|
||||
BUILD_NUMBER: ${{ needs.build-artifacts.outputs.build_number }}
|
||||
TRIGGER_PRODUCTION_RELEASE: true
|
||||
secrets:
|
||||
CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
|
||||
CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
|
||||
239
.github/workflows/manual-release.yml
vendored
Normal file
239
.github/workflows/manual-release.yml
vendored
Normal file
@@ -0,0 +1,239 @@
|
||||
name: Manual Release
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version:
|
||||
description: 'Version to release (e.g., 4.25.3)'
|
||||
required: true
|
||||
type: string
|
||||
target_commitish:
|
||||
description: 'Commit SHA or branch (leave empty for current HEAD)'
|
||||
required: false
|
||||
type: string
|
||||
release_notes:
|
||||
description: 'Release notes/changelog (leave empty to auto-generate from commits)'
|
||||
required: false
|
||||
type: string
|
||||
prerelease:
|
||||
description: 'Mark as prerelease'
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
validate-version:
|
||||
name: Validate and Update Package Versions
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: ${{ inputs.target_commitish || github.ref }}
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Check and Update Package Versions
|
||||
run: |
|
||||
EXPECTED_VERSION="${{ inputs.version }}"
|
||||
MISMATCHES_FOUND=false
|
||||
|
||||
PACKAGE_JSONS=(
|
||||
"package.json"
|
||||
"api/package.json"
|
||||
"web/package.json"
|
||||
"unraid-ui/package.json"
|
||||
"plugin/package.json"
|
||||
"packages/unraid-shared/package.json"
|
||||
"packages/unraid-api-plugin-health/package.json"
|
||||
"packages/unraid-api-plugin-generator/package.json"
|
||||
"packages/unraid-api-plugin-connect/package.json"
|
||||
)
|
||||
|
||||
echo "Checking package.json versions against expected version: ${EXPECTED_VERSION}"
|
||||
|
||||
for pkg in "${PACKAGE_JSONS[@]}"; do
|
||||
if [ -f "$pkg" ]; then
|
||||
CURRENT_VERSION=$(node -p "require('./$pkg').version")
|
||||
if [ "$CURRENT_VERSION" != "$EXPECTED_VERSION" ]; then
|
||||
echo "❌ Version mismatch in $pkg: $CURRENT_VERSION != $EXPECTED_VERSION"
|
||||
MISMATCHES_FOUND=true
|
||||
|
||||
# Detect indentation by checking the first property line
|
||||
INDENT_SPACES=$(head -10 "$pkg" | grep '^ *"' | head -1 | sed 's/".*//g' | wc -c)
|
||||
INDENT_SPACES=$((INDENT_SPACES - 1))
|
||||
|
||||
jq --indent "$INDENT_SPACES" --arg version "$EXPECTED_VERSION" '.version = $version' "$pkg" > "$pkg.tmp" && mv "$pkg.tmp" "$pkg"
|
||||
echo "✓ Updated $pkg to version $EXPECTED_VERSION"
|
||||
else
|
||||
echo "✓ $pkg version matches: $CURRENT_VERSION"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
if [ "$MISMATCHES_FOUND" = true ]; then
|
||||
echo ""
|
||||
echo "=========================================="
|
||||
echo "Version mismatches found!"
|
||||
echo "=========================================="
|
||||
echo ""
|
||||
|
||||
BRANCH_OR_SHA="${{ inputs.target_commitish || github.ref }}"
|
||||
|
||||
if git show-ref --verify --quiet "refs/heads/${BRANCH_OR_SHA}"; then
|
||||
echo "Creating commit with version updates and pushing to branch: ${BRANCH_OR_SHA}"
|
||||
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
BEFORE_SHA=$(git rev-parse HEAD)
|
||||
|
||||
git add ${PACKAGE_JSONS[@]}
|
||||
git commit -m "chore: update package versions to ${{ inputs.version }}"
|
||||
git push origin "HEAD:${BRANCH_OR_SHA}"
|
||||
|
||||
AFTER_SHA=$(git rev-parse HEAD)
|
||||
|
||||
echo ""
|
||||
echo "=========================================="
|
||||
echo "WORKFLOW MUST BE RE-RUN"
|
||||
echo "=========================================="
|
||||
echo ""
|
||||
echo "✓ Version updates committed and pushed successfully"
|
||||
echo ""
|
||||
echo "Previous SHA: ${BEFORE_SHA}"
|
||||
echo "New SHA: ${AFTER_SHA}"
|
||||
echo ""
|
||||
echo "⚠️ CRITICAL: A new commit was created, but github.sha is immutable."
|
||||
echo "⚠️ github.sha = ${BEFORE_SHA} (original workflow trigger)"
|
||||
echo "⚠️ The release tag must point to ${AFTER_SHA} (with version updates)"
|
||||
echo ""
|
||||
echo "Re-run this workflow to create the release with the correct commit."
|
||||
echo ""
|
||||
exit 1
|
||||
else
|
||||
echo "Target is a commit SHA, not a branch. Cannot push version updates."
|
||||
echo "Please update the package.json versions manually and re-run the workflow."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "✓ All package.json versions match the expected version: ${EXPECTED_VERSION}"
|
||||
|
||||
build-artifacts:
|
||||
name: Build All Artifacts
|
||||
needs:
|
||||
- validate-version
|
||||
uses: ./.github/workflows/build-artifacts.yml
|
||||
with:
|
||||
ref: ${{ inputs.target_commitish || github.ref }}
|
||||
version_override: ${{ inputs.version }}
|
||||
secrets:
|
||||
VITE_ACCOUNT: ${{ secrets.VITE_ACCOUNT }}
|
||||
VITE_CONNECT: ${{ secrets.VITE_CONNECT }}
|
||||
VITE_UNRAID_NET: ${{ secrets.VITE_UNRAID_NET }}
|
||||
VITE_CALLBACK_KEY: ${{ secrets.VITE_CALLBACK_KEY }}
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
generate-release-notes:
|
||||
name: Generate Release Notes
|
||||
needs:
|
||||
- build-artifacts
|
||||
uses: ./.github/workflows/generate-release-notes.yml
|
||||
with:
|
||||
version: ${{ inputs.version }}
|
||||
target_commitish: ${{ inputs.target_commitish || github.ref }}
|
||||
release_notes: ${{ inputs.release_notes }}
|
||||
secrets:
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
create-release:
|
||||
name: Create GitHub Release (Draft)
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- generate-release-notes
|
||||
outputs:
|
||||
tag_name: ${{ steps.create_release.outputs.tag_name }}
|
||||
release_notes: ${{ needs.generate-release-notes.outputs.release_notes }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: ${{ inputs.target_commitish || github.ref }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Create or Update Release as Draft
|
||||
id: create_release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
TAG_NAME="v${{ inputs.version }}"
|
||||
TARGET="${{ inputs.target_commitish || github.sha }}"
|
||||
|
||||
echo "tag_name=${TAG_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
if gh release view "${TAG_NAME}" > /dev/null 2>&1; then
|
||||
echo "Release ${TAG_NAME} already exists, updating as draft..."
|
||||
gh release edit "${TAG_NAME}" \
|
||||
--draft \
|
||||
--notes "${{ needs.generate-release-notes.outputs.release_notes }}" \
|
||||
${{ inputs.prerelease && '--prerelease' || '' }}
|
||||
else
|
||||
echo "Creating new draft release ${TAG_NAME}..."
|
||||
git tag "${TAG_NAME}" "${TARGET}" || true
|
||||
git push origin "${TAG_NAME}" || true
|
||||
|
||||
gh release create "${TAG_NAME}" \
|
||||
--draft \
|
||||
--title "${{ inputs.version }}" \
|
||||
--notes "${{ needs.generate-release-notes.outputs.release_notes }}" \
|
||||
--target "${TARGET}" \
|
||||
${{ inputs.prerelease && '--prerelease' || '' }}
|
||||
fi
|
||||
|
||||
build-plugin-production:
|
||||
name: Build and Deploy Production Plugin
|
||||
needs:
|
||||
- create-release
|
||||
- build-artifacts
|
||||
uses: ./.github/workflows/build-plugin.yml
|
||||
with:
|
||||
RELEASE_CREATED: 'true'
|
||||
RELEASE_TAG: ${{ needs.create-release.outputs.tag_name }}
|
||||
TAG: ""
|
||||
BUCKET_PATH: unraid-api
|
||||
BASE_URL: "https://stable.dl.unraid.net/unraid-api"
|
||||
BUILD_NUMBER: ${{ needs.build-artifacts.outputs.build_number }}
|
||||
ref: ${{ inputs.target_commitish || github.ref }}
|
||||
secrets:
|
||||
CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
|
||||
CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
CF_BUCKET_PREVIEW: ${{ secrets.CF_BUCKET_PREVIEW }}
|
||||
CF_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
publish-release:
|
||||
name: Publish Release
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- create-release
|
||||
- build-plugin-production
|
||||
steps:
|
||||
- name: Publish Release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
TAG_NAME="${{ needs.create-release.outputs.tag_name }}"
|
||||
echo "Publishing release ${TAG_NAME}..."
|
||||
gh release edit "${TAG_NAME}" --draft=false --repo ${{ github.repository }}
|
||||
|
||||
30
.github/workflows/publish-schema.yml
vendored
Normal file
30
.github/workflows/publish-schema.yml
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
name: Publish GraphQL Schema
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- 'api/generated-schema.graphql'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
publish-schema:
|
||||
name: Publish Schema to Apollo Studio
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Install Apollo Rover CLI
|
||||
run: |
|
||||
curl -sSL https://rover.apollo.dev/nix/latest | sh
|
||||
echo "$HOME/.rover/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Publish schema to Apollo Studio
|
||||
env:
|
||||
APOLLO_KEY: ${{ secrets.APOLLO_KEY }}
|
||||
run: |
|
||||
rover graph publish Unraid-API@current \
|
||||
--schema api/generated-schema.graphql
|
||||
|
||||
3
.github/workflows/release-production.yml
vendored
3
.github/workflows/release-production.yml
vendored
@@ -28,7 +28,7 @@ jobs:
|
||||
with:
|
||||
latest: true
|
||||
prerelease: false
|
||||
- uses: actions/setup-node@v5
|
||||
- uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 22.19.0
|
||||
- run: |
|
||||
@@ -143,4 +143,3 @@ jobs:
|
||||
${{ steps.release-info.outputs.body }}
|
||||
embed-color: 16734296
|
||||
embed-footer-text: "Unraid API • Automated Release"
|
||||
embed-timestamp: true
|
||||
|
||||
6
.gitignore
vendored
6
.gitignore
vendored
@@ -122,4 +122,10 @@ api/dev/Unraid.net/myservers.cfg
|
||||
|
||||
# local Mise settings
|
||||
.mise.toml
|
||||
mise.toml
|
||||
|
||||
# Compiled test pages (generated from Nunjucks templates)
|
||||
web/public/test-pages/*.html
|
||||
|
||||
# local scripts for testing and development
|
||||
.dev-scripts/
|
||||
|
||||
@@ -1 +1 @@
|
||||
{".":"4.23.0"}
|
||||
{".":"4.29.2"}
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
@custom-variant dark (&:where(.dark, .dark *));
|
||||
|
||||
/* Utility defaults for web components (when we were using shadow DOM) */
|
||||
:host {
|
||||
:host,
|
||||
.unapi {
|
||||
--tw-divide-y-reverse: 0;
|
||||
--tw-border-style: solid;
|
||||
--tw-font-weight: initial;
|
||||
@@ -61,25 +62,17 @@
|
||||
}
|
||||
*/
|
||||
|
||||
body {
|
||||
--color-alpha: #1c1b1b;
|
||||
--color-beta: #f2f2f2;
|
||||
--color-gamma: #999999;
|
||||
--color-gamma-opaque: rgba(153, 153, 153, 0.5);
|
||||
--color-customgradient-start: rgba(242, 242, 242, 0);
|
||||
--color-customgradient-end: rgba(242, 242, 242, 0.85);
|
||||
--shadow-beta: 0 25px 50px -12px rgba(242, 242, 242, 0.15);
|
||||
--ring-offset-shadow: 0 0 var(--color-beta);
|
||||
--ring-shadow: 0 0 var(--color-beta);
|
||||
.unapi {
|
||||
}
|
||||
|
||||
button:not(:disabled),
|
||||
[role='button']:not(:disabled) {
|
||||
.unapi button:not(:disabled),
|
||||
.unapi [role='button']:not(:disabled) {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
/* Font size overrides for SSO button component */
|
||||
unraid-sso-button {
|
||||
.unapi unraid-sso-button,
|
||||
unraid-sso-button.unapi {
|
||||
--text-xs: 0.75rem;
|
||||
--text-sm: 0.875rem;
|
||||
--text-base: 1rem;
|
||||
|
||||
@@ -2,4 +2,4 @@
|
||||
@import './css-variables.css';
|
||||
@import './unraid-theme.css';
|
||||
@import './theme-variants.css';
|
||||
@import './base-utilities.css';
|
||||
@import './base-utilities.css';
|
||||
@@ -5,88 +5,64 @@
|
||||
*/
|
||||
|
||||
/* Default/White Theme */
|
||||
:root,
|
||||
.theme-white {
|
||||
--header-text-primary: #ffffff;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #1c1b1b;
|
||||
--header-gradient-start: rgba(28, 27, 27, 0);
|
||||
--header-gradient-end: rgba(28, 27, 27, 0.7);
|
||||
.Theme--white {
|
||||
--color-border: #383735;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #1c1b1b;
|
||||
--color-gamma: #ffffff;
|
||||
--color-gamma-opaque: rgba(255, 255, 255, 0.3);
|
||||
--color-header-gradient-start: color-mix(in srgb, var(--header-background-color) 0%, transparent);
|
||||
--color-header-gradient-end: color-mix(in srgb, var(--header-background-color) 100%, transparent);
|
||||
--shadow-beta: 0 25px 50px -12px color-mix(in srgb, var(--color-beta) 15%, transparent);
|
||||
--ring-offset-shadow: 0 0 var(--color-beta);
|
||||
--ring-shadow: 0 0 var(--color-beta);
|
||||
}
|
||||
|
||||
/* Black Theme */
|
||||
.theme-black,
|
||||
.theme-black.dark {
|
||||
--header-text-primary: #1c1b1b;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #f2f2f2;
|
||||
--header-gradient-start: rgba(242, 242, 242, 0);
|
||||
--header-gradient-end: rgba(242, 242, 242, 0.7);
|
||||
.Theme--black,
|
||||
.Theme--black.dark {
|
||||
--color-border: #e0e0e0;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #f2f2f2;
|
||||
--color-gamma: #1c1b1b;
|
||||
--color-gamma-opaque: rgba(28, 27, 27, 0.3);
|
||||
--color-header-gradient-start: color-mix(in srgb, var(--header-background-color) 0%, transparent);
|
||||
--color-header-gradient-end: color-mix(in srgb, var(--header-background-color) 100%, transparent);
|
||||
--shadow-beta: 0 25px 50px -12px color-mix(in srgb, var(--color-beta) 15%, transparent);
|
||||
--ring-offset-shadow: 0 0 var(--color-beta);
|
||||
--ring-shadow: 0 0 var(--color-beta);
|
||||
}
|
||||
|
||||
/* Gray Theme */
|
||||
.theme-gray {
|
||||
--header-text-primary: #ffffff;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #1c1b1b;
|
||||
--header-gradient-start: rgba(28, 27, 27, 0);
|
||||
--header-gradient-end: rgba(28, 27, 27, 0.7);
|
||||
.Theme--gray,
|
||||
.Theme--gray.dark {
|
||||
--color-border: #383735;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #383735;
|
||||
--color-gamma: #ffffff;
|
||||
--color-gamma-opaque: rgba(255, 255, 255, 0.3);
|
||||
--color-header-gradient-start: color-mix(in srgb, var(--header-background-color) 0%, transparent);
|
||||
--color-header-gradient-end: color-mix(in srgb, var(--header-background-color) 100%, transparent);
|
||||
--shadow-beta: 0 25px 50px -12px color-mix(in srgb, var(--color-beta) 15%, transparent);
|
||||
--ring-offset-shadow: 0 0 var(--color-beta);
|
||||
--ring-shadow: 0 0 var(--color-beta);
|
||||
}
|
||||
|
||||
/* Azure Theme */
|
||||
.theme-azure {
|
||||
--header-text-primary: #1c1b1b;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #f2f2f2;
|
||||
--header-gradient-start: rgba(242, 242, 242, 0);
|
||||
--header-gradient-end: rgba(242, 242, 242, 0.7);
|
||||
.Theme--azure {
|
||||
--color-border: #5a8bb8;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #e7f2f8;
|
||||
--color-gamma: #336699;
|
||||
--color-gamma-opaque: rgba(51, 102, 153, 0.3);
|
||||
--color-header-gradient-start: color-mix(in srgb, var(--header-background-color) 0%, transparent);
|
||||
--color-header-gradient-end: color-mix(in srgb, var(--header-background-color) 100%, transparent);
|
||||
--shadow-beta: 0 25px 50px -12px color-mix(in srgb, var(--color-beta) 15%, transparent);
|
||||
--ring-offset-shadow: 0 0 var(--color-beta);
|
||||
--ring-shadow: 0 0 var(--color-beta);
|
||||
}
|
||||
|
||||
/* Dark Mode Overrides */
|
||||
.dark {
|
||||
--color-border: #383735;
|
||||
}
|
||||
|
||||
/*
|
||||
* Dynamic color variables for user overrides from GraphQL
|
||||
* These are set via JavaScript and override the theme defaults
|
||||
* Using :root with class for higher specificity to override theme classes
|
||||
*/
|
||||
:root.has-custom-header-text {
|
||||
--header-text-primary: var(--custom-header-text-primary);
|
||||
--color-header-text-primary: var(--custom-header-text-primary);
|
||||
}
|
||||
|
||||
:root.has-custom-header-meta {
|
||||
--header-text-secondary: var(--custom-header-text-secondary);
|
||||
--color-header-text-secondary: var(--custom-header-text-secondary);
|
||||
}
|
||||
|
||||
:root.has-custom-header-bg {
|
||||
--header-background-color: var(--custom-header-background-color);
|
||||
--color-header-background: var(--custom-header-background-color);
|
||||
--header-gradient-start: var(--custom-header-gradient-start);
|
||||
--header-gradient-end: var(--custom-header-gradient-end);
|
||||
--color-header-gradient-start: var(--custom-header-gradient-start);
|
||||
--color-header-gradient-end: var(--custom-header-gradient-end);
|
||||
}
|
||||
@@ -19,6 +19,7 @@ PATHS_LOGS_FILE=./dev/log/graphql-api.log
|
||||
PATHS_CONNECT_STATUS_FILE_PATH=./dev/connectStatus.json # Connect plugin status file
|
||||
PATHS_OIDC_JSON=./dev/configs/oidc.local.json
|
||||
PATHS_LOCAL_SESSION_FILE=./dev/local-session
|
||||
PATHS_DOCKER_TEMPLATES=./dev/docker-templates
|
||||
ENVIRONMENT="development"
|
||||
NODE_ENV="development"
|
||||
PORT="3001"
|
||||
@@ -32,3 +33,4 @@ CHOKIDAR_USEPOLLING=true
|
||||
LOG_TRANSPORT=console
|
||||
LOG_LEVEL=trace
|
||||
ENABLE_NEXT_DOCKER_RELEASE=true
|
||||
SKIP_CONNECT_PLUGIN_CHECK=true
|
||||
|
||||
@@ -3,3 +3,4 @@ NODE_ENV="production"
|
||||
PORT="/var/run/unraid-api.sock"
|
||||
MOTHERSHIP_GRAPHQL_LINK="https://mothership.unraid.net/ws"
|
||||
PATHS_CONFIG_MODULES="/boot/config/plugins/dynamix.my.servers/configs"
|
||||
ENABLE_NEXT_DOCKER_RELEASE=true
|
||||
|
||||
@@ -3,3 +3,4 @@ NODE_ENV="production"
|
||||
PORT="/var/run/unraid-api.sock"
|
||||
MOTHERSHIP_GRAPHQL_LINK="https://staging.mothership.unraid.net/ws"
|
||||
PATHS_CONFIG_MODULES="/boot/config/plugins/dynamix.my.servers/configs"
|
||||
ENABLE_NEXT_DOCKER_RELEASE=true
|
||||
|
||||
@@ -8,7 +8,7 @@ export default tseslint.config(
|
||||
eslint.configs.recommended,
|
||||
...tseslint.configs.recommended,
|
||||
{
|
||||
ignores: ['src/graphql/generated/client/**/*', 'src/**/**/dummy-process.js'],
|
||||
ignores: ['src/graphql/generated/client/**/*', 'src/**/**/dummy-process.js', 'dist/**/*'],
|
||||
},
|
||||
{
|
||||
plugins: {
|
||||
@@ -42,7 +42,10 @@ export default tseslint.config(
|
||||
'ignorePackages',
|
||||
{
|
||||
js: 'always',
|
||||
ts: 'always',
|
||||
mjs: 'always',
|
||||
cjs: 'always',
|
||||
ts: 'never',
|
||||
tsx: 'never',
|
||||
},
|
||||
],
|
||||
'no-restricted-globals': [
|
||||
|
||||
6
api/.gitignore
vendored
6
api/.gitignore
vendored
@@ -83,6 +83,8 @@ deploy/*
|
||||
|
||||
!**/*.login.*
|
||||
|
||||
# Local Development Artifacts
|
||||
|
||||
# local api configs - don't need project-wide tracking
|
||||
dev/connectStatus.json
|
||||
dev/configs/*
|
||||
@@ -96,3 +98,7 @@ dev/configs/oidc.local.json
|
||||
|
||||
# local api keys
|
||||
dev/keys/*
|
||||
# mock docker templates
|
||||
dev/docker-templates
|
||||
# ie unraid notifications
|
||||
dev/notifications
|
||||
@@ -5,3 +5,4 @@ src/unraid-api/unraid-file-modifier/modifications/__fixtures__/downloaded/*
|
||||
|
||||
# Generated Types
|
||||
src/graphql/generated/client/*.ts
|
||||
dist/
|
||||
|
||||
181
api/CHANGELOG.md
181
api/CHANGELOG.md
@@ -1,5 +1,186 @@
|
||||
# Changelog
|
||||
|
||||
## [4.29.2](https://github.com/unraid/api/compare/v4.29.1...v4.29.2) (2025-12-19)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* unraid-connect plugin not loaded when connect is installed ([#1856](https://github.com/unraid/api/issues/1856)) ([73135b8](https://github.com/unraid/api/commit/73135b832801f5c76d60020161492e4770958c3d))
|
||||
|
||||
## [4.29.1](https://github.com/unraid/api/compare/v4.29.0...v4.29.1) (2025-12-19)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* revert replace docker overview table with web component (7.3+) ([#1853](https://github.com/unraid/api/issues/1853)) ([560db88](https://github.com/unraid/api/commit/560db880cc138324f9ff8753f7209b683a84c045))
|
||||
|
||||
## [4.29.0](https://github.com/unraid/api/compare/v4.28.2...v4.29.0) (2025-12-19)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* replace docker overview table with web component (7.3+) ([#1764](https://github.com/unraid/api/issues/1764)) ([277ac42](https://github.com/unraid/api/commit/277ac420464379e7ee6739c4530271caf7717503))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* handle race condition between guid loading and license check ([#1847](https://github.com/unraid/api/issues/1847)) ([8b155d1](https://github.com/unraid/api/commit/8b155d1f1c99bb19efbc9614e000d852e9f0c12d))
|
||||
* resolve issue with "Continue" button when updating ([#1852](https://github.com/unraid/api/issues/1852)) ([d099e75](https://github.com/unraid/api/commit/d099e7521d2062bb9cf84f340e46b169dd2492c5))
|
||||
* update myservers config references to connect config references ([#1810](https://github.com/unraid/api/issues/1810)) ([e1e3ea7](https://github.com/unraid/api/commit/e1e3ea7eb68cc6840f67a8aec937fd3740e75b28))
|
||||
|
||||
## [4.28.2](https://github.com/unraid/api/compare/v4.28.1...v4.28.2) (2025-12-16)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **api:** timeout on startup on 7.0 and 6.12 ([#1844](https://github.com/unraid/api/issues/1844)) ([e243ae8](https://github.com/unraid/api/commit/e243ae836ec1a7fde37dceeb106cc693b20ec82b))
|
||||
|
||||
## [4.28.1](https://github.com/unraid/api/compare/v4.28.0...v4.28.1) (2025-12-16)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* empty commit to release as 4.28.1 ([df78608](https://github.com/unraid/api/commit/df786084572eefb82e086c15939b50cc08b9db10))
|
||||
|
||||
## [4.28.0](https://github.com/unraid/api/compare/v4.27.2...v4.28.0) (2025-12-15)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* when cancelling OS upgrade, delete any plugin files that were d… ([#1823](https://github.com/unraid/api/issues/1823)) ([74df938](https://github.com/unraid/api/commit/74df938e450def2ee3e2864d4b928f53a68e9eb8))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* change keyfile watcher to poll instead of inotify on FAT32 ([#1820](https://github.com/unraid/api/issues/1820)) ([23a7120](https://github.com/unraid/api/commit/23a71207ddde221867562b722f4e65a5fc4dd744))
|
||||
* enhance dark mode support in theme handling ([#1808](https://github.com/unraid/api/issues/1808)) ([d6e2939](https://github.com/unraid/api/commit/d6e29395c8a8b0215d4f5945775de7fa358d06ec))
|
||||
* improve API startup reliability with timeout budget tracking ([#1824](https://github.com/unraid/api/issues/1824)) ([51f025b](https://github.com/unraid/api/commit/51f025b105487b178048afaabf46b260c4a7f9c1))
|
||||
* PHP Warnings in Management Settings ([#1805](https://github.com/unraid/api/issues/1805)) ([832e9d0](https://github.com/unraid/api/commit/832e9d04f207d3ec612c98500a2ffc86659264e5))
|
||||
* **plg:** explicitly stop an existing api before installation ([#1841](https://github.com/unraid/api/issues/1841)) ([99ce88b](https://github.com/unraid/api/commit/99ce88bfdc0a7f020c42f2fe0c6a0f4e32ac8f5a))
|
||||
* update @unraid/shared-callbacks to version 3.0.0 ([#1831](https://github.com/unraid/api/issues/1831)) ([73b2ce3](https://github.com/unraid/api/commit/73b2ce360c66cd9bedc138a5f8306af04b6bde77))
|
||||
* **ups:** convert estimatedRuntime from minutes to seconds ([#1822](https://github.com/unraid/api/issues/1822)) ([024ae69](https://github.com/unraid/api/commit/024ae69343bad5a3cbc19f80e357082e9b2efc1e))
|
||||
|
||||
## [4.27.2](https://github.com/unraid/api/compare/v4.27.1...v4.27.2) (2025-11-21)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* issue with header flashing + issue with trial date ([64875ed](https://github.com/unraid/api/commit/64875edbba786a0d1ba0113c9e9a3d38594eafcc))
|
||||
|
||||
## [4.27.1](https://github.com/unraid/api/compare/v4.27.0...v4.27.1) (2025-11-21)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* missing translations for expiring trials ([#1800](https://github.com/unraid/api/issues/1800)) ([36c1049](https://github.com/unraid/api/commit/36c104915ece203a3cac9e1a13e0c325e536a839))
|
||||
* resolve header flash when background color is set ([#1796](https://github.com/unraid/api/issues/1796)) ([dc9a036](https://github.com/unraid/api/commit/dc9a036c73d8ba110029364e0d044dc24c7d0dfa))
|
||||
|
||||
## [4.27.0](https://github.com/unraid/api/compare/v4.26.2...v4.27.0) (2025-11-19)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* remove Unraid API log download functionality ([#1793](https://github.com/unraid/api/issues/1793)) ([e4a9b82](https://github.com/unraid/api/commit/e4a9b8291b049752a9ff59b17ff50cf464fe0535))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* auto-uninstallation of connect api plugin ([#1791](https://github.com/unraid/api/issues/1791)) ([e734043](https://github.com/unraid/api/commit/e7340431a58821ec1b4f5d1b452fba6613b01fa5))
|
||||
|
||||
## [4.26.2](https://github.com/unraid/api/compare/v4.26.1...v4.26.2) (2025-11-19)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **theme:** Missing header background color ([e2fdf6c](https://github.com/unraid/api/commit/e2fdf6cadbd816559b8c82546c2bc771a81ffa9e))
|
||||
|
||||
## [4.26.1](https://github.com/unraid/api/compare/v4.26.0...v4.26.1) (2025-11-18)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **theme:** update theme class naming and scoping logic ([b28ef1e](https://github.com/unraid/api/commit/b28ef1ea334cb4842f01fa992effa7024185c6c9))
|
||||
|
||||
## [4.26.0](https://github.com/unraid/api/compare/v4.25.3...v4.26.0) (2025-11-17)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add cpu power query & subscription ([#1745](https://github.com/unraid/api/issues/1745)) ([d7aca81](https://github.com/unraid/api/commit/d7aca81c60281bfa47fb9113929c1ead6ed3361b))
|
||||
* add schema publishing to apollo studio ([#1772](https://github.com/unraid/api/issues/1772)) ([7e13202](https://github.com/unraid/api/commit/7e13202aa1c02803095bb72bb1bcb2472716f53a))
|
||||
* add workflow_dispatch trigger to schema publishing workflow ([818e7ce](https://github.com/unraid/api/commit/818e7ce997059663e07efcf1dab706bf0d7fc9da))
|
||||
* apollo studio readme link ([c4cd0c6](https://github.com/unraid/api/commit/c4cd0c63520deec15d735255f38811f0360fe3a1))
|
||||
* **cli:** make `unraid-api plugins remove` scriptable ([#1774](https://github.com/unraid/api/issues/1774)) ([64eb9ce](https://github.com/unraid/api/commit/64eb9ce9b5d1ff4fb1f08d9963522c5d32221ba7))
|
||||
* use persisted theme css to fix flashes on header ([#1784](https://github.com/unraid/api/issues/1784)) ([854b403](https://github.com/unraid/api/commit/854b403fbd85220a3012af58ce033cf0b8418516))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **api:** decode html entities before parsing notifications ([#1768](https://github.com/unraid/api/issues/1768)) ([42406e7](https://github.com/unraid/api/commit/42406e795da1e5b95622951a467722dde72d51a8))
|
||||
* **connect:** disable api plugin if unraid plugin is absent ([#1773](https://github.com/unraid/api/issues/1773)) ([c264a18](https://github.com/unraid/api/commit/c264a1843cf115e8cc1add1ab4f12fdcc932405a))
|
||||
* detection of flash backup activation state ([#1769](https://github.com/unraid/api/issues/1769)) ([d18eaf2](https://github.com/unraid/api/commit/d18eaf2364e0c04992c52af38679ff0a0c570440))
|
||||
* re-add missing header gradient styles ([#1787](https://github.com/unraid/api/issues/1787)) ([f8a6785](https://github.com/unraid/api/commit/f8a6785e9c92f81acaef76ac5eb78a4a769e69da))
|
||||
* respect OS safe mode in plugin loader ([#1775](https://github.com/unraid/api/issues/1775)) ([92af3b6](https://github.com/unraid/api/commit/92af3b61156cabae70368cf5222a2f7ac5b4d083))
|
||||
|
||||
## [4.25.3](https://github.com/unraid/unraid-api/compare/v4.25.2...v4.25.3) (2025-10-22)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* flaky watch on boot drive's dynamix config ([ec7aa06](https://github.com/unraid/unraid-api/commit/ec7aa06d4a5fb1f0e84420266b0b0d7ee09a3663))
|
||||
|
||||
## [4.25.2](https://github.com/unraid/api/compare/v4.25.1...v4.25.2) (2025-09-30)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* enhance activation code modal visibility logic ([#1733](https://github.com/unraid/api/issues/1733)) ([e57ec00](https://github.com/unraid/api/commit/e57ec00627e54ce76d903fd0fa8686ad02b393f3))
|
||||
|
||||
## [4.25.1](https://github.com/unraid/api/compare/v4.25.0...v4.25.1) (2025-09-30)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add cache busting to web component extractor ([#1731](https://github.com/unraid/api/issues/1731)) ([0d165a6](https://github.com/unraid/api/commit/0d165a608740505bdc505dcf69fb615225969741))
|
||||
* Connect won't appear within Apps - Previous Apps ([#1727](https://github.com/unraid/api/issues/1727)) ([d73953f](https://github.com/unraid/api/commit/d73953f8ff3d7425c0aed32d16236ededfd948e1))
|
||||
|
||||
## [4.25.0](https://github.com/unraid/api/compare/v4.24.1...v4.25.0) (2025-09-26)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add Tailwind scoping plugin and integrate into Vite config ([#1722](https://github.com/unraid/api/issues/1722)) ([b7afaf4](https://github.com/unraid/api/commit/b7afaf463243b073e1ab1083961a16a12ac6c4a3))
|
||||
* notification filter controls pill buttons ([#1718](https://github.com/unraid/api/issues/1718)) ([661865f](https://github.com/unraid/api/commit/661865f97611cf802f239fde8232f3109281dde6))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* enable auth guard for nested fields - thanks [@ingel81](https://github.com/ingel81) ([7bdeca8](https://github.com/unraid/api/commit/7bdeca8338a3901f15fde06fd7aede3b0c16e087))
|
||||
* enhance user context validation in auth module ([#1726](https://github.com/unraid/api/issues/1726)) ([cd5eff1](https://github.com/unraid/api/commit/cd5eff11bcb4398581472966cb7ec124eac7ad0a))
|
||||
|
||||
## [4.24.1](https://github.com/unraid/api/compare/v4.24.0...v4.24.1) (2025-09-23)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* cleanup leftover removed packages on upgrade ([#1719](https://github.com/unraid/api/issues/1719)) ([9972a5f](https://github.com/unraid/api/commit/9972a5f178f9a251e6c129d85c5f11cfd25e6281))
|
||||
* enhance version comparison logic in installation script ([d9c561b](https://github.com/unraid/api/commit/d9c561bfebed0c553fe4bfa26b088ae71ca59755))
|
||||
* issue with incorrect permissions on viewer / other roles ([378cdb7](https://github.com/unraid/api/commit/378cdb7f102f63128dd236c13f1a3745902d5a2c))
|
||||
|
||||
## [4.24.0](https://github.com/unraid/api/compare/v4.23.1...v4.24.0) (2025-09-18)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* improve dom content loading by being more efficient about component mounting ([#1716](https://github.com/unraid/api/issues/1716)) ([d8b166e](https://github.com/unraid/api/commit/d8b166e4b6a718e07783d9c8ac8393b50ec89ae3))
|
||||
|
||||
## [4.23.1](https://github.com/unraid/api/compare/v4.23.0...v4.23.1) (2025-09-17)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* cleanup ini parser logic with better fallbacks ([#1713](https://github.com/unraid/api/issues/1713)) ([1691362](https://github.com/unraid/api/commit/16913627de9497a5d2f71edb710cec6e2eb9f890))
|
||||
|
||||
## [4.23.0](https://github.com/unraid/api/compare/v4.22.2...v4.23.0) (2025-09-16)
|
||||
|
||||
|
||||
|
||||
@@ -71,6 +71,20 @@ unraid-api report -vv
|
||||
|
||||
If you found this file you're likely a developer. If you'd like to know more about the API and when it's available please join [our discord](https://discord.unraid.net/).
|
||||
|
||||
## Internationalization
|
||||
|
||||
- Run `pnpm --filter @unraid/api i18n:extract` to scan the Nest.js source for translation helper usages and update `src/i18n/en.json` with any new keys. The extractor keeps existing translations intact and appends new keys with their English source text.
|
||||
|
||||
## Developer Documentation
|
||||
|
||||
For detailed information about specific features:
|
||||
|
||||
- [API Plugins](docs/developer/api-plugins.md) - Working with API plugins and workspace packages
|
||||
- [Docker Feature](docs/developer/docker.md) - Container management, GraphQL API, and WebGUI integration
|
||||
- [Feature Flags](docs/developer/feature-flags.md) - Conditionally enabling functionality
|
||||
- [Repository Organization](docs/developer/repo-organization.md) - Codebase structure
|
||||
- [Development Workflows](docs/developer/workflows.md) - Development processes
|
||||
|
||||
## License
|
||||
|
||||
Copyright Lime Technology Inc. All rights reserved.
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
{
|
||||
"version": "4.22.2",
|
||||
"version": "4.29.2",
|
||||
"extraOrigins": [],
|
||||
"sandbox": true,
|
||||
"ssoSubIds": [],
|
||||
"plugins": [
|
||||
"unraid-api-plugin-connect"
|
||||
]
|
||||
}
|
||||
"plugins": ["unraid-api-plugin-connect"]
|
||||
}
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
timestamp=1730937600
|
||||
event=Hashtag Test
|
||||
subject=Warning [UNRAID] - #1 OS is cooking
|
||||
description=Disk 1 temperature has reached #epic # levels of proportion
|
||||
importance=warning
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
timestamp=1730937600
|
||||
event=Temperature Test
|
||||
subject=Warning [UNRAID] - High disk temperature detected: 45 °C
|
||||
description=Disk 1 temperature has reached 45 °C (threshold: 40 °C)<br><br>Current temperatures:<br>Parity - 32 °C [OK]<br>Disk 1 - 45 °C [WARNING]<br>Disk 2 - 38 °C [OK]<br>Cache - 28 °C [OK]<br><br>Please check cooling system.
|
||||
importance=warning
|
||||
|
||||
555
api/docs/developer/docker.md
Normal file
555
api/docs/developer/docker.md
Normal file
@@ -0,0 +1,555 @@
|
||||
# Docker Feature
|
||||
|
||||
The Docker feature provides complete container management for Unraid through a GraphQL API, including lifecycle operations, real-time monitoring, update detection, and organizational tools.
|
||||
|
||||
## Table of Contents
|
||||
|
||||
- [Overview](#overview)
|
||||
- [Architecture](#architecture)
|
||||
- [Module Structure](#module-structure)
|
||||
- [Data Flow](#data-flow)
|
||||
- [Core Services](#core-services)
|
||||
- [DockerService](#dockerservice)
|
||||
- [DockerNetworkService](#dockernetworkservice)
|
||||
- [DockerPortService](#dockerportservice)
|
||||
- [DockerLogService](#dockerlogservice)
|
||||
- [DockerStatsService](#dockerstatsservice)
|
||||
- [DockerAutostartService](#dockerautostartservice)
|
||||
- [DockerConfigService](#dockerconfigservice)
|
||||
- [DockerManifestService](#dockermanifestservice)
|
||||
- [DockerPhpService](#dockerphpservice)
|
||||
- [DockerTailscaleService](#dockertailscaleservice)
|
||||
- [DockerTemplateScannerService](#dockertemplatescannerservice)
|
||||
- [DockerOrganizerService](#dockerorganizerservice)
|
||||
- [GraphQL API](#graphql-api)
|
||||
- [Queries](#queries)
|
||||
- [Mutations](#mutations)
|
||||
- [Subscriptions](#subscriptions)
|
||||
- [Data Models](#data-models)
|
||||
- [DockerContainer](#dockercontainer)
|
||||
- [ContainerState](#containerstate)
|
||||
- [ContainerPort](#containerport)
|
||||
- [DockerPortConflicts](#dockerportconflicts)
|
||||
- [Caching Strategy](#caching-strategy)
|
||||
- [WebGUI Integration](#webgui-integration)
|
||||
- [File Modification](#file-modification)
|
||||
- [PHP Integration](#php-integration)
|
||||
- [Permissions](#permissions)
|
||||
- [Configuration Files](#configuration-files)
|
||||
- [Development](#development)
|
||||
- [Adding a New Docker Service](#adding-a-new-docker-service)
|
||||
- [Testing](#testing)
|
||||
- [Feature Flag Testing](#feature-flag-testing)
|
||||
|
||||
## Overview
|
||||
|
||||
**Location:** `src/unraid-api/graph/resolvers/docker/`
|
||||
|
||||
**Feature Flag:** Many next-generation features are gated behind `ENABLE_NEXT_DOCKER_RELEASE`. See [Feature Flags](./feature-flags.md) for details on enabling.
|
||||
|
||||
**Key Capabilities:**
|
||||
|
||||
- Container lifecycle management (start, stop, pause, update, remove)
|
||||
- Real-time container stats streaming
|
||||
- Network and port conflict detection
|
||||
- Container log retrieval
|
||||
- Automatic update detection via digest comparison
|
||||
- Tailscale container integration
|
||||
- Container organization with folders and views
|
||||
- Template-based metadata resolution
|
||||
|
||||
## Architecture
|
||||
|
||||
### Module Structure
|
||||
|
||||
The Docker module (`docker.module.ts`) serves as the entry point and exports:
|
||||
|
||||
- **13 services** for various Docker operations
|
||||
- **3 resolvers** for GraphQL query/mutation/subscription handling
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
- `JobModule` - Background job scheduling
|
||||
- `NotificationsModule` - User notifications
|
||||
- `ServicesModule` - Shared service utilities
|
||||
|
||||
### Data Flow
|
||||
|
||||
```text
|
||||
Docker Daemon (Unix Socket)
|
||||
↓
|
||||
dockerode library
|
||||
↓
|
||||
DockerService (transform & cache)
|
||||
↓
|
||||
GraphQL Resolvers
|
||||
↓
|
||||
Client Applications
|
||||
```
|
||||
|
||||
The API communicates with the Docker daemon through the `dockerode` library via Unix socket. Container data is transformed from raw Docker API format to GraphQL types, enriched with Unraid-specific metadata (templates, autostart config), and cached for performance.
|
||||
|
||||
## Core Services
|
||||
|
||||
### DockerService
|
||||
|
||||
**File:** `docker.service.ts`
|
||||
|
||||
Central orchestrator for all container operations.
|
||||
|
||||
**Key Methods:**
|
||||
|
||||
- `getContainers(skipCache?, includeSize?)` - List containers with caching
|
||||
- `start(id)`, `stop(id)`, `pause(id)`, `unpause(id)` - Lifecycle operations
|
||||
- `updateContainer(id)`, `updateContainers(ids)`, `updateAllContainers()` - Image updates
|
||||
- `removeContainer(id, withImage?)` - Remove container and optionally its image
|
||||
|
||||
**Caching:**
|
||||
|
||||
- Cache TTL: 60 seconds (60000ms)
|
||||
- Cache keys: `docker_containers`, `docker_containers_with_size`
|
||||
- Invalidated automatically on mutations
|
||||
|
||||
### DockerNetworkService
|
||||
|
||||
**File:** `docker-network.service.ts`
|
||||
|
||||
Lists Docker networks with metadata including driver, scope, IPAM settings, and connected containers.
|
||||
|
||||
**Caching:** 60 seconds
|
||||
|
||||
### DockerPortService
|
||||
|
||||
**File:** `docker-port.service.ts`
|
||||
|
||||
Detects port conflicts between containers and with the host.
|
||||
|
||||
**Features:**
|
||||
|
||||
- Deduplicates port mappings from Docker API
|
||||
- Identifies container-to-container conflicts
|
||||
- Detects host-level port collisions
|
||||
- Separates TCP and UDP conflicts
|
||||
- Calculates LAN-accessible IP:port combinations
|
||||
|
||||
### DockerLogService
|
||||
|
||||
**File:** `docker-log.service.ts`
|
||||
|
||||
Retrieves container logs with configurable options.
|
||||
|
||||
**Parameters:**
|
||||
|
||||
- `tail` - Number of lines (default: 200, max: 2000)
|
||||
- `since` - Timestamp filter for log entries
|
||||
|
||||
**Additional Features:**
|
||||
|
||||
- Calculates container log file sizes
|
||||
- Supports timestamp-based filtering
|
||||
|
||||
### DockerStatsService
|
||||
|
||||
**File:** `docker-stats.service.ts`
|
||||
|
||||
Provides real-time container statistics via GraphQL subscription.
|
||||
|
||||
**Metrics:**
|
||||
|
||||
- CPU percentage
|
||||
- Memory usage and limit
|
||||
- Network I/O (received/transmitted bytes)
|
||||
- Block I/O (read/written bytes)
|
||||
|
||||
**Implementation:**
|
||||
|
||||
- Spawns `docker stats` process with streaming output
|
||||
- Publishes to `PUBSUB_CHANNEL.DOCKER_STATS`
|
||||
- Auto-starts on first subscriber, stops when last disconnects
|
||||
|
||||
### DockerAutostartService
|
||||
|
||||
**File:** `docker-autostart.service.ts`
|
||||
|
||||
Manages container auto-start configuration.
|
||||
|
||||
**Features:**
|
||||
|
||||
- Parses auto-start file format (name + wait time per line)
|
||||
- Maintains auto-start order and wait times
|
||||
- Persists configuration changes
|
||||
- Tracks container primary names
|
||||
|
||||
### DockerConfigService
|
||||
|
||||
**File:** `docker-config.service.ts`
|
||||
|
||||
Persistent configuration management using `ConfigFilePersister`.
|
||||
|
||||
**Configuration Options:**
|
||||
|
||||
- `templateMappings` - Container name to template file path mappings
|
||||
- `skipTemplatePaths` - Containers excluded from template scanning
|
||||
- `updateCheckCronSchedule` - Cron expression for digest refresh (default: daily at 6am)
|
||||
|
||||
### DockerManifestService
|
||||
|
||||
**File:** `docker-manifest.service.ts`
|
||||
|
||||
Detects available container image updates.
|
||||
|
||||
**Implementation:**
|
||||
|
||||
- Compares local and remote image SHA256 digests
|
||||
- Reads cached status from `/var/lib/docker/unraid-update-status.json`
|
||||
- Triggers refresh via PHP integration
|
||||
|
||||
### DockerPhpService
|
||||
|
||||
**File:** `docker-php.service.ts`
|
||||
|
||||
Integration with legacy Unraid PHP Docker scripts.
|
||||
|
||||
**PHP Scripts Used:**
|
||||
|
||||
- `DockerUpdate.php` - Refresh container digests
|
||||
- `DockerContainers.php` - Get update statuses
|
||||
|
||||
**Update Statuses:**
|
||||
|
||||
- `UP_TO_DATE` - Container is current
|
||||
- `UPDATE_AVAILABLE` - New image available
|
||||
- `REBUILD_READY` - Rebuild required
|
||||
- `UNKNOWN` - Status could not be determined
|
||||
|
||||
### DockerTailscaleService
|
||||
|
||||
**File:** `docker-tailscale.service.ts`
|
||||
|
||||
Detects and monitors Tailscale-enabled containers.
|
||||
|
||||
**Detection Methods:**
|
||||
|
||||
- Container labels indicating Tailscale
|
||||
- Tailscale socket mount points
|
||||
|
||||
**Status Information:**
|
||||
|
||||
- Tailscale version and backend state
|
||||
- Hostname and DNS name
|
||||
- Exit node status
|
||||
- Key expiry dates
|
||||
|
||||
**Caching:**
|
||||
|
||||
- Status cache: 30 seconds
|
||||
- DERP map and versions: 24 hours
|
||||
|
||||
### DockerTemplateScannerService
|
||||
|
||||
**File:** `docker-template-scanner.service.ts`
|
||||
|
||||
Maps containers to their template files for metadata resolution.
|
||||
|
||||
**Bootstrap Process:**
|
||||
|
||||
1. Runs 5 seconds after app startup
|
||||
2. Scans XML templates from configured paths
|
||||
3. Parses container/image names from XML
|
||||
4. Matches against running containers
|
||||
5. Stores mappings in `docker.config.json`
|
||||
|
||||
**Template Metadata Resolved:**
|
||||
|
||||
- `projectUrl`, `registryUrl`, `supportUrl`
|
||||
- `iconUrl`, `webUiUrl`, `shell`
|
||||
- Template port mappings
|
||||
|
||||
**Orphaned Containers:**
|
||||
|
||||
Containers without matching templates are marked as "orphaned" in the API response.
|
||||
|
||||
### DockerOrganizerService
|
||||
|
||||
**File:** `organizer/docker-organizer.service.ts`
|
||||
|
||||
Container organization system for UI views.
|
||||
|
||||
**Features:**
|
||||
|
||||
- Hierarchical folder structure
|
||||
- Multiple views with different layouts
|
||||
- Position-based organization
|
||||
- View-specific preferences (sorting, filtering)
|
||||
|
||||
## GraphQL API
|
||||
|
||||
### Queries
|
||||
|
||||
```graphql
|
||||
type Query {
|
||||
docker: Docker!
|
||||
}
|
||||
|
||||
type Docker {
|
||||
containers(skipCache: Boolean): [DockerContainer!]!
|
||||
container(id: PrefixedID!): DockerContainer # Feature-flagged
|
||||
networks(skipCache: Boolean): [DockerNetwork!]!
|
||||
portConflicts(skipCache: Boolean): DockerPortConflicts!
|
||||
logs(id: PrefixedID!, since: Int, tail: Int): DockerContainerLogs!
|
||||
organizer(skipCache: Boolean): DockerOrganizer! # Feature-flagged
|
||||
containerUpdateStatuses: [ContainerUpdateStatus!]! # Feature-flagged
|
||||
}
|
||||
```
|
||||
|
||||
### Mutations
|
||||
|
||||
**Container Lifecycle:**
|
||||
|
||||
```graphql
|
||||
type Mutation {
|
||||
start(id: PrefixedID!): DockerContainer!
|
||||
stop(id: PrefixedID!): DockerContainer!
|
||||
pause(id: PrefixedID!): DockerContainer!
|
||||
unpause(id: PrefixedID!): DockerContainer!
|
||||
removeContainer(id: PrefixedID!, withImage: Boolean): Boolean!
|
||||
}
|
||||
```
|
||||
|
||||
**Container Updates:**
|
||||
|
||||
```graphql
|
||||
type Mutation {
|
||||
updateContainer(id: PrefixedID!): DockerContainer!
|
||||
updateContainers(ids: [PrefixedID!]!): [DockerContainer!]!
|
||||
updateAllContainers: [DockerContainer!]!
|
||||
refreshDockerDigests: Boolean!
|
||||
}
|
||||
```
|
||||
|
||||
**Configuration:**
|
||||
|
||||
```graphql
|
||||
type Mutation {
|
||||
updateAutostartConfiguration(
|
||||
entries: [AutostartEntry!]!
|
||||
persistUserPreferences: Boolean
|
||||
): Boolean!
|
||||
syncDockerTemplatePaths: Boolean!
|
||||
resetDockerTemplateMappings: Boolean!
|
||||
}
|
||||
```
|
||||
|
||||
**Organizer (Feature-flagged):**
|
||||
|
||||
```graphql
|
||||
type Mutation {
|
||||
createDockerFolder(name: String!, parentId: ID, childrenIds: [ID!]): DockerFolder!
|
||||
createDockerFolderWithItems(
|
||||
name: String!
|
||||
parentId: ID
|
||||
sourceEntryIds: [ID!]
|
||||
position: Int
|
||||
): DockerFolder!
|
||||
setDockerFolderChildren(folderId: ID!, childrenIds: [ID!]!): DockerFolder!
|
||||
deleteDockerEntries(entryIds: [ID!]!): Boolean!
|
||||
moveDockerEntriesToFolder(sourceEntryIds: [ID!]!, destinationFolderId: ID!): Boolean!
|
||||
moveDockerItemsToPosition(
|
||||
sourceEntryIds: [ID!]!
|
||||
destinationFolderId: ID!
|
||||
position: Int!
|
||||
): Boolean!
|
||||
renameDockerFolder(folderId: ID!, newName: String!): DockerFolder!
|
||||
updateDockerViewPreferences(viewId: ID!, prefs: ViewPreferencesInput!): Boolean!
|
||||
}
|
||||
```
|
||||
|
||||
### Subscriptions
|
||||
|
||||
```graphql
|
||||
type Subscription {
|
||||
dockerContainerStats: DockerContainerStats!
|
||||
}
|
||||
```
|
||||
|
||||
Real-time container statistics stream. Automatically starts when first client subscribes and stops when last client disconnects.
|
||||
|
||||
## Data Models
|
||||
|
||||
### DockerContainer
|
||||
|
||||
Primary container representation with 24+ fields:
|
||||
|
||||
```typescript
|
||||
{
|
||||
id: PrefixedID
|
||||
names: [String!]!
|
||||
image: String!
|
||||
imageId: String!
|
||||
state: ContainerState!
|
||||
status: String!
|
||||
created: Float!
|
||||
|
||||
// Networking
|
||||
ports: [ContainerPort!]!
|
||||
lanIpPorts: [ContainerPort!]!
|
||||
hostConfig: ContainerHostConfig
|
||||
networkSettings: DockerNetworkSettings
|
||||
|
||||
// Storage
|
||||
sizeRootFs: Float
|
||||
sizeRw: Float
|
||||
sizeLog: Float
|
||||
mounts: [ContainerMount!]!
|
||||
|
||||
// Metadata
|
||||
labels: JSON
|
||||
|
||||
// Auto-start
|
||||
autoStart: Boolean!
|
||||
autoStartOrder: Int
|
||||
autoStartWait: Int
|
||||
|
||||
// Template Integration
|
||||
templatePath: String
|
||||
isOrphaned: Boolean!
|
||||
projectUrl: String
|
||||
registryUrl: String
|
||||
supportUrl: String
|
||||
iconUrl: String
|
||||
webUiUrl: String
|
||||
shell: String
|
||||
templatePorts: [ContainerPort!]
|
||||
|
||||
// Tailscale
|
||||
tailscaleEnabled: Boolean!
|
||||
tailscaleStatus: TailscaleStatus
|
||||
|
||||
// Updates
|
||||
isUpdateAvailable: Boolean
|
||||
isRebuildReady: Boolean
|
||||
}
|
||||
```
|
||||
|
||||
### ContainerState
|
||||
|
||||
```typescript
|
||||
enum ContainerState {
|
||||
RUNNING
|
||||
PAUSED
|
||||
EXITED
|
||||
}
|
||||
```
|
||||
|
||||
### ContainerPort
|
||||
|
||||
```typescript
|
||||
{
|
||||
ip: String
|
||||
privatePort: Int!
|
||||
publicPort: Int
|
||||
type: String! // "tcp" or "udp"
|
||||
}
|
||||
```
|
||||
|
||||
### DockerPortConflicts
|
||||
|
||||
```typescript
|
||||
{
|
||||
containerConflicts: [DockerContainerPortConflict!]!
|
||||
lanConflicts: [DockerLanPortConflict!]!
|
||||
}
|
||||
```
|
||||
|
||||
## Caching Strategy
|
||||
|
||||
The Docker feature uses `cache-manager` v7 for performance optimization.
|
||||
|
||||
**Important:** cache-manager v7 expects TTL values in **milliseconds**, not seconds.
|
||||
|
||||
| Cache Key | TTL | Invalidation |
|
||||
|-----------|-----|--------------|
|
||||
| `docker_containers` | 60s | On any container mutation |
|
||||
| `docker_containers_with_size` | 60s | On any container mutation |
|
||||
| `docker_networks` | 60s | On network changes |
|
||||
| Tailscale status | 30s | Automatic |
|
||||
| Tailscale DERP/versions | 24h | Automatic |
|
||||
|
||||
**Cache Invalidation Triggers:**
|
||||
|
||||
- `start()`, `stop()`, `pause()`, `unpause()`
|
||||
- `updateContainer()`, `updateContainers()`, `updateAllContainers()`
|
||||
- `removeContainer()`
|
||||
- `updateAutostartConfiguration()`
|
||||
|
||||
## WebGUI Integration
|
||||
|
||||
### File Modification
|
||||
|
||||
**File:** `unraid-file-modifier/modifications/docker-containers-page.modification.ts`
|
||||
|
||||
**Target:** `/usr/local/emhttp/plugins/dynamix.docker.manager/DockerContainers.page`
|
||||
|
||||
When `ENABLE_NEXT_DOCKER_RELEASE` is enabled and Unraid version is 7.3.0+, the modification:
|
||||
|
||||
1. Replaces the legacy Docker containers page
|
||||
2. Injects the Vue web component: `<unraid-docker-container-overview>`
|
||||
3. Retains the `Nchan="docker_load"` page attribute (an emhttp/WebGUI feature for real-time updates, not controlled by the API)
|
||||
|
||||
### PHP Integration
|
||||
|
||||
The API integrates with legacy Unraid PHP scripts for certain operations:
|
||||
|
||||
- **Digest refresh:** Calls `DockerUpdate.php` to refresh container image digests
|
||||
- **Update status:** Reads from `DockerContainers.php` output
|
||||
|
||||
## Permissions
|
||||
|
||||
All Docker operations are protected with permission checks:
|
||||
|
||||
| Operation | Resource | Action |
|
||||
|-----------|----------|--------|
|
||||
| Read containers/networks | `Resource.DOCKER` | `AuthAction.READ_ANY` |
|
||||
| Start/stop/pause/update | `Resource.DOCKER` | `AuthAction.UPDATE_ANY` |
|
||||
| Remove containers | `Resource.DOCKER` | `AuthAction.DELETE_ANY` |
|
||||
|
||||
## Configuration Files
|
||||
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `docker.config.json` | Template mappings, skip paths, cron schedule |
|
||||
| `docker.organizer.json` | Container organization tree and views |
|
||||
| `/var/lib/docker/unraid-update-status.json` | Cached container update statuses |
|
||||
|
||||
## Development
|
||||
|
||||
### Adding a New Docker Service
|
||||
|
||||
1. Create service file in `src/unraid-api/graph/resolvers/docker/`
|
||||
2. Add to `docker.module.ts` providers and exports
|
||||
3. Inject into resolvers as needed
|
||||
4. Add GraphQL types to `docker.model.ts` if needed
|
||||
|
||||
### Testing
|
||||
|
||||
```bash
|
||||
# Run Docker-related tests
|
||||
pnpm --filter ./api test -- src/unraid-api/graph/resolvers/docker/
|
||||
|
||||
# Run specific test file
|
||||
pnpm --filter ./api test -- src/unraid-api/graph/resolvers/docker/docker.service.spec.ts
|
||||
```
|
||||
|
||||
### Feature Flag Testing
|
||||
|
||||
To test next-generation Docker features locally:
|
||||
|
||||
```bash
|
||||
ENABLE_NEXT_DOCKER_RELEASE=true unraid-api start
|
||||
```
|
||||
|
||||
Or add to `.env`:
|
||||
|
||||
```env
|
||||
ENABLE_NEXT_DOCKER_RELEASE=true
|
||||
```
|
||||
@@ -62,15 +62,18 @@ To build all packages in the monorepo:
|
||||
pnpm build
|
||||
```
|
||||
|
||||
### Watch Mode Building
|
||||
### Plugin Building (Docker Required)
|
||||
|
||||
For continuous building during development:
|
||||
The plugin build requires Docker. This command automatically builds all dependencies (API, web) before starting Docker:
|
||||
|
||||
```bash
|
||||
pnpm build:watch
|
||||
cd plugin
|
||||
pnpm run docker:build-and-run
|
||||
# Then inside the container:
|
||||
pnpm build
|
||||
```
|
||||
|
||||
This is useful when you want to see your changes reflected without manually rebuilding. This will also allow you to install a local plugin to test your changes.
|
||||
This serves the plugin at `http://YOUR_IP:5858/` for installation on your Unraid server.
|
||||
|
||||
### Package-Specific Building
|
||||
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
"label": "Unraid API",
|
||||
"position": 4
|
||||
}
|
||||
@@ -1,100 +0,0 @@
|
||||
# API Key Authorization Flow
|
||||
|
||||
This document describes the self-service API key creation flow for third-party applications.
|
||||
|
||||
## Overview
|
||||
|
||||
Applications can request API access to an Unraid server by redirecting users to a special authorization page where users can review requested permissions and create an API key with one click.
|
||||
|
||||
## Flow
|
||||
|
||||
1. **Application initiates request**: The app redirects the user to:
|
||||
|
||||
```
|
||||
https://[unraid-server]/ApiKeyAuthorize?name=MyApp&scopes=docker:read,vm:*&redirect_uri=https://myapp.com/callback&state=abc123
|
||||
```
|
||||
|
||||
2. **User authentication**: If not already logged in, the user is redirected to login first (standard Unraid auth)
|
||||
|
||||
3. **Consent screen**: User sees:
|
||||
- Application name and description
|
||||
- Requested permissions (with checkboxes to approve/deny specific scopes)
|
||||
- API key name field (pre-filled)
|
||||
- Authorize & Cancel buttons
|
||||
|
||||
4. **API key creation**: Upon authorization:
|
||||
- API key is created with approved scopes
|
||||
- Key is displayed to the user
|
||||
- If `redirect_uri` is provided, user is redirected back with the key
|
||||
|
||||
5. **Callback**: App receives the API key:
|
||||
```
|
||||
https://myapp.com/callback?api_key=xxx&state=abc123
|
||||
```
|
||||
|
||||
## Query Parameters
|
||||
|
||||
- `name` (required): Name of the requesting application
|
||||
- `description` (optional): Description of the application
|
||||
- `scopes` (required): Comma-separated list of requested scopes
|
||||
- `redirect_uri` (optional): URL to redirect after authorization
|
||||
- `state` (optional): Opaque value for maintaining state
|
||||
|
||||
## Scope Format
|
||||
|
||||
Scopes follow the pattern: `resource:action`
|
||||
|
||||
### Examples:
|
||||
|
||||
- `docker:read` - Read access to Docker
|
||||
- `vm:*` - Full access to VMs
|
||||
- `system:update` - Update access to system
|
||||
- `role:viewer` - Viewer role access
|
||||
- `role:admin` - Admin role access
|
||||
|
||||
### Available Resources:
|
||||
|
||||
- `docker`, `vm`, `system`, `share`, `user`, `network`, `disk`, etc.
|
||||
|
||||
### Available Actions:
|
||||
|
||||
- `create`, `read`, `update`, `delete` or `*` for all
|
||||
|
||||
## Security Considerations
|
||||
|
||||
1. **HTTPS required**: Redirect URIs must use HTTPS (except localhost for development)
|
||||
2. **User consent**: Users explicitly approve each permission
|
||||
3. **Session-based**: Uses existing Unraid authentication session
|
||||
4. **One-time display**: API keys are shown once and must be saved securely
|
||||
|
||||
## Example Integration
|
||||
|
||||
```javascript
|
||||
// JavaScript example
|
||||
const unraidServer = 'tower.local';
|
||||
const appName = 'My Docker Manager';
|
||||
const scopes = 'docker:*,system:read';
|
||||
const redirectUri = 'https://myapp.com/unraid/callback';
|
||||
const state = generateRandomState();
|
||||
|
||||
// Store state for verification
|
||||
sessionStorage.setItem('oauth_state', state);
|
||||
|
||||
// Redirect user to authorization page
|
||||
window.location.href =
|
||||
`https://${unraidServer}/ApiKeyAuthorize?` +
|
||||
`name=${encodeURIComponent(appName)}&` +
|
||||
`scopes=${encodeURIComponent(scopes)}&` +
|
||||
`redirect_uri=${encodeURIComponent(redirectUri)}&` +
|
||||
`state=${encodeURIComponent(state)}`;
|
||||
|
||||
// Handle callback
|
||||
const urlParams = new URLSearchParams(window.location.search);
|
||||
const apiKey = urlParams.get('api_key');
|
||||
const returnedState = urlParams.get('state');
|
||||
|
||||
if (returnedState === sessionStorage.getItem('oauth_state')) {
|
||||
// Save API key securely
|
||||
saveApiKey(apiKey);
|
||||
}
|
||||
```
|
||||
@@ -1,210 +0,0 @@
|
||||
---
|
||||
title: CLI Reference
|
||||
description: Complete reference for all Unraid API CLI commands
|
||||
sidebar_position: 4
|
||||
---
|
||||
|
||||
# CLI Commands
|
||||
|
||||
:::info[Command Structure]
|
||||
All commands follow the pattern: `unraid-api <command> [options]`
|
||||
:::
|
||||
|
||||
## 🚀 Service Management
|
||||
|
||||
### Start
|
||||
|
||||
```bash
|
||||
unraid-api start [--log-level <level>]
|
||||
```
|
||||
|
||||
Starts the Unraid API service.
|
||||
|
||||
Options:
|
||||
|
||||
- `--log-level`: Set logging level (trace|debug|info|warn|error|fatal)
|
||||
|
||||
Alternative: You can also set the log level using the `LOG_LEVEL` environment variable:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=trace unraid-api start
|
||||
```
|
||||
|
||||
### Stop
|
||||
|
||||
```bash
|
||||
unraid-api stop [--delete]
|
||||
```
|
||||
|
||||
Stops the Unraid API service.
|
||||
|
||||
- `--delete`: Optional. Delete the PM2 home directory
|
||||
|
||||
### Restart
|
||||
|
||||
```bash
|
||||
unraid-api restart [--log-level <level>]
|
||||
```
|
||||
|
||||
Restarts the Unraid API service.
|
||||
|
||||
Options:
|
||||
|
||||
- `--log-level`: Set logging level (trace|debug|info|warn|error|fatal)
|
||||
|
||||
Alternative: You can also set the log level using the `LOG_LEVEL` environment variable:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=trace unraid-api restart
|
||||
```
|
||||
|
||||
### Logs
|
||||
|
||||
```bash
|
||||
unraid-api logs [-l <lines>]
|
||||
```
|
||||
|
||||
View the API logs.
|
||||
|
||||
- `-l, --lines`: Optional. Number of lines to tail (default: 100)
|
||||
|
||||
## ⚙️ Configuration Commands
|
||||
|
||||
### Config
|
||||
|
||||
```bash
|
||||
unraid-api config
|
||||
```
|
||||
|
||||
Displays current configuration values.
|
||||
|
||||
### Switch Environment
|
||||
|
||||
```bash
|
||||
unraid-api switch-env [-e <environment>]
|
||||
```
|
||||
|
||||
Switch between production and staging environments.
|
||||
|
||||
- `-e, --environment`: Optional. Target environment (production|staging)
|
||||
|
||||
### Developer Mode
|
||||
|
||||
:::tip Web GUI Management
|
||||
You can also manage developer options through the web interface at **Settings** → **Management Access** → **Developer Options**
|
||||
:::
|
||||
|
||||
```bash
|
||||
unraid-api developer # Interactive prompt for tools
|
||||
unraid-api developer --sandbox true # Enable GraphQL sandbox
|
||||
unraid-api developer --sandbox false # Disable GraphQL sandbox
|
||||
unraid-api developer --enable-modal # Enable modal testing tool
|
||||
unraid-api developer --disable-modal # Disable modal testing tool
|
||||
```
|
||||
|
||||
Configure developer features for the API:
|
||||
|
||||
- **GraphQL Sandbox**: Enable/disable Apollo GraphQL sandbox at `/graphql`
|
||||
- **Modal Testing Tool**: Enable/disable UI modal testing in the Unraid menu
|
||||
|
||||
## API Key Management
|
||||
|
||||
:::tip Web GUI Management
|
||||
You can also manage API keys through the web interface at **Settings** → **Management Access** → **API Keys**
|
||||
:::
|
||||
|
||||
### API Key Commands
|
||||
|
||||
```bash
|
||||
unraid-api apikey [options]
|
||||
```
|
||||
|
||||
Create and manage API keys via CLI.
|
||||
|
||||
Options:
|
||||
|
||||
- `--name <name>`: Name of the key
|
||||
- `--create`: Create a new key
|
||||
- `-r, --roles <roles>`: Comma-separated list of roles
|
||||
- `-p, --permissions <permissions>`: Comma-separated list of permissions
|
||||
- `-d, --description <description>`: Description for the key
|
||||
|
||||
## SSO (Single Sign-On) Management
|
||||
|
||||
:::info OIDC Configuration
|
||||
For OIDC/SSO provider configuration, see the web interface at **Settings** → **Management Access** → **API** → **OIDC** or refer to the [OIDC Provider Setup](./oidc-provider-setup.md) guide.
|
||||
:::
|
||||
|
||||
### SSO Base Command
|
||||
|
||||
```bash
|
||||
unraid-api sso
|
||||
```
|
||||
|
||||
#### Add SSO User
|
||||
|
||||
```bash
|
||||
unraid-api sso add-user
|
||||
# or
|
||||
unraid-api sso add
|
||||
# or
|
||||
unraid-api sso a
|
||||
```
|
||||
|
||||
Add a new user for SSO authentication.
|
||||
|
||||
#### Remove SSO User
|
||||
|
||||
```bash
|
||||
unraid-api sso remove-user
|
||||
# or
|
||||
unraid-api sso remove
|
||||
# or
|
||||
unraid-api sso r
|
||||
```
|
||||
|
||||
Remove a user (or all users) from SSO.
|
||||
|
||||
#### List SSO Users
|
||||
|
||||
```bash
|
||||
unraid-api sso list-users
|
||||
# or
|
||||
unraid-api sso list
|
||||
# or
|
||||
unraid-api sso l
|
||||
```
|
||||
|
||||
List all configured SSO users.
|
||||
|
||||
#### Validate SSO Token
|
||||
|
||||
```bash
|
||||
unraid-api sso validate-token <token>
|
||||
# or
|
||||
unraid-api sso validate
|
||||
# or
|
||||
unraid-api sso v
|
||||
```
|
||||
|
||||
Validates an SSO token and returns its status.
|
||||
|
||||
## Report Generation
|
||||
|
||||
### Generate Report
|
||||
|
||||
```bash
|
||||
unraid-api report [-r] [-j]
|
||||
```
|
||||
|
||||
Generate a system report.
|
||||
|
||||
- `-r, --raw`: Display raw command output
|
||||
- `-j, --json`: Display output in JSON format
|
||||
|
||||
## Notes
|
||||
|
||||
1. Most commands require appropriate permissions to modify system state
|
||||
2. Some commands require the API to be running or stopped
|
||||
3. Store API keys securely as they provide system access
|
||||
4. SSO configuration changes may require a service restart
|
||||
@@ -1,255 +0,0 @@
|
||||
---
|
||||
title: Using the Unraid API
|
||||
description: Learn how to interact with your Unraid server through the GraphQL API
|
||||
sidebar_position: 2
|
||||
---
|
||||
|
||||
# Using the Unraid API
|
||||
|
||||
:::tip[Quick Start]
|
||||
The Unraid API provides a powerful GraphQL interface for managing your server. This guide covers authentication, common queries, and best practices.
|
||||
:::
|
||||
|
||||
The Unraid API provides a GraphQL interface that allows you to interact with your Unraid server. This guide will help you get started with exploring and using the API.
|
||||
|
||||
## 🎮 Enabling the GraphQL Sandbox
|
||||
|
||||
### Web GUI Method (Recommended)
|
||||
|
||||
:::info[Preferred Method]
|
||||
Using the Web GUI is the easiest way to enable the GraphQL sandbox.
|
||||
:::
|
||||
|
||||
1. Navigate to **Settings** → **Management Access** → **Developer Options**
|
||||
2. Enable the **GraphQL Sandbox** toggle
|
||||
3. Access the GraphQL playground by navigating to:
|
||||
|
||||
```txt
|
||||
http://YOUR_SERVER_IP/graphql
|
||||
```
|
||||
|
||||
### CLI Method
|
||||
|
||||
Alternatively, you can enable developer mode using the CLI:
|
||||
|
||||
```bash
|
||||
unraid-api developer --sandbox true
|
||||
```
|
||||
|
||||
Or use the interactive mode:
|
||||
|
||||
```bash
|
||||
unraid-api developer
|
||||
```
|
||||
|
||||
## 🔑 Authentication
|
||||
|
||||
:::warning[Required for Most Operations]
|
||||
Most queries and mutations require authentication. Always include appropriate credentials in your requests.
|
||||
:::
|
||||
|
||||
You can authenticate using:
|
||||
|
||||
1. **API Keys** - For programmatic access
|
||||
2. **Cookies** - Automatic when signed into the WebGUI
|
||||
3. **SSO/OIDC** - When configured with external providers
|
||||
|
||||
### Managing API Keys
|
||||
|
||||
<tabs>
|
||||
<tabItem value="gui" label="Web GUI (Recommended)" default>
|
||||
|
||||
Navigate to **Settings** → **Management Access** → **API Keys** in your Unraid web interface to:
|
||||
|
||||
- View existing API keys
|
||||
- Create new API keys
|
||||
- Manage permissions and roles
|
||||
- Revoke or regenerate keys
|
||||
|
||||
</tabItem>
|
||||
<tabItem value="cli" label="CLI Method">
|
||||
|
||||
You can also use the CLI to create an API key:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create
|
||||
```
|
||||
|
||||
Follow the prompts to set:
|
||||
|
||||
- Name
|
||||
- Description
|
||||
- Roles
|
||||
- Permissions
|
||||
|
||||
</tabItem>
|
||||
</tabs>
|
||||
|
||||
### Using API Keys
|
||||
|
||||
The generated API key should be included in your GraphQL requests as a header:
|
||||
|
||||
```json
|
||||
{
|
||||
"x-api-key": "YOUR_API_KEY"
|
||||
}
|
||||
```
|
||||
|
||||
## 📊 Available Schemas
|
||||
|
||||
The API provides access to various aspects of your Unraid server:
|
||||
|
||||
### System Information
|
||||
|
||||
- Query system details including CPU, memory, and OS information
|
||||
- Monitor system status and health
|
||||
- Access baseboard and hardware information
|
||||
|
||||
### Array Management
|
||||
|
||||
- Query array status and configuration
|
||||
- Manage array operations (start/stop)
|
||||
- Monitor disk status and health
|
||||
- Perform parity checks
|
||||
|
||||
### Docker Management
|
||||
|
||||
- List and manage Docker containers
|
||||
- Monitor container status
|
||||
- Manage Docker networks
|
||||
|
||||
### Remote Access
|
||||
|
||||
- Configure and manage remote access settings
|
||||
- Handle SSO configuration
|
||||
- Manage allowed origins
|
||||
|
||||
### 💻 Example Queries
|
||||
|
||||
#### Check System Status
|
||||
|
||||
```graphql
|
||||
query {
|
||||
info {
|
||||
os {
|
||||
platform
|
||||
distro
|
||||
release
|
||||
uptime
|
||||
}
|
||||
cpu {
|
||||
manufacturer
|
||||
brand
|
||||
cores
|
||||
threads
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### Monitor Array Status
|
||||
|
||||
```graphql
|
||||
query {
|
||||
array {
|
||||
state
|
||||
capacity {
|
||||
disks {
|
||||
free
|
||||
used
|
||||
total
|
||||
}
|
||||
}
|
||||
disks {
|
||||
name
|
||||
size
|
||||
status
|
||||
temp
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### List Docker Containers
|
||||
|
||||
```graphql
|
||||
query {
|
||||
dockerContainers {
|
||||
id
|
||||
names
|
||||
state
|
||||
status
|
||||
autoStart
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## 🏗️ Schema Types
|
||||
|
||||
The API includes several core types:
|
||||
|
||||
### Base Types
|
||||
|
||||
- `Node`: Interface for objects with unique IDs - please see [Object Identification](https://graphql.org/learn/global-object-identification/)
|
||||
- `JSON`: For complex JSON data
|
||||
- `DateTime`: For timestamp values
|
||||
- `Long`: For 64-bit integers
|
||||
|
||||
### Resource Types
|
||||
|
||||
- `Array`: Array and disk management
|
||||
- `Docker`: Container and network management
|
||||
- `Info`: System information
|
||||
- `Config`: Server configuration
|
||||
- `Connect`: Remote access settings
|
||||
|
||||
### Role-Based Access
|
||||
|
||||
Available roles:
|
||||
|
||||
- `admin`: Full access
|
||||
- `connect`: Remote access features
|
||||
- `guest`: Limited read access
|
||||
|
||||
## ✨ Best Practices
|
||||
|
||||
:::tip[Pro Tips]
|
||||
1. Use the Apollo Sandbox to explore the schema and test queries
|
||||
2. Start with small queries and gradually add fields as needed
|
||||
3. Monitor your query complexity to maintain performance
|
||||
4. Use appropriate roles and permissions for your API keys
|
||||
5. Keep your API keys secure and rotate them periodically
|
||||
:::
|
||||
|
||||
## ⏱️ Rate Limiting
|
||||
|
||||
:::caution[Rate Limits]
|
||||
The API implements rate limiting to prevent abuse. Ensure your applications handle rate limit responses appropriately.
|
||||
:::
|
||||
|
||||
## 🚨 Error Handling
|
||||
|
||||
The API returns standard GraphQL errors in the following format:
|
||||
|
||||
```json
|
||||
{
|
||||
"errors": [
|
||||
{
|
||||
"message": "Error description",
|
||||
"locations": [...],
|
||||
"path": [...]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## 📚 Additional Resources
|
||||
|
||||
:::info[Learn More]
|
||||
- Use the Apollo Sandbox's schema explorer to browse all available types and fields
|
||||
- Check the documentation tab in Apollo Sandbox for detailed field descriptions
|
||||
- Monitor the API's health using `unraid-api status`
|
||||
- Generate reports using `unraid-api report` for troubleshooting
|
||||
|
||||
For more information about specific commands and configuration options, refer to the [CLI documentation](/cli) or run `unraid-api --help`.
|
||||
:::
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 101 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 96 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 85 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 128 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 75 KiB |
@@ -1,94 +0,0 @@
|
||||
---
|
||||
title: Welcome to Unraid API
|
||||
description: The official GraphQL API for Unraid Server management and automation
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
# Welcome to Unraid API
|
||||
|
||||
:::tip[What's New]
|
||||
Starting with Unraid OS v7.2, the API comes built into the operating system - no plugin installation required!
|
||||
:::
|
||||
|
||||
The Unraid API provides a GraphQL interface for programmatic interaction with your Unraid server. It enables automation, monitoring, and integration capabilities.
|
||||
|
||||
## 📦 Availability
|
||||
|
||||
### ✨ Native Integration (Unraid OS v7.2+)
|
||||
|
||||
Starting with Unraid OS v7.2, the API is integrated directly into the operating system:
|
||||
|
||||
- No plugin installation required
|
||||
- Automatically available on system startup
|
||||
- Deep system integration
|
||||
- Access through **Settings** → **Management Access** → **API**
|
||||
|
||||
### 🔌 Plugin Installation (Pre-7.2 and Advanced Users)
|
||||
|
||||
For Unraid versions prior to v7.2 or to access newer API features:
|
||||
|
||||
1. Install the Unraid Connect Plugin from Community Apps
|
||||
2. [Configure the plugin](./how-to-use-the-api.md#enabling-the-graphql-sandbox)
|
||||
3. Access API functionality through the [GraphQL Sandbox](./how-to-use-the-api.md)
|
||||
|
||||
:::info Important Notes
|
||||
- The Unraid Connect plugin provides the API for pre-7.2 versions
|
||||
- You do NOT need to sign in to Unraid Connect to use the API locally
|
||||
- Installing the plugin on 7.2+ gives you access to newer API features before they're included in OS releases
|
||||
:::
|
||||
|
||||
## 📚 Documentation Sections
|
||||
|
||||
<cards>
|
||||
<card title="CLI Commands" icon="terminal" href="./cli">
|
||||
Complete reference for all CLI commands
|
||||
</card>
|
||||
<card title="Using the API" icon="code" href="./how-to-use-the-api">
|
||||
Learn how to interact with the GraphQL API
|
||||
</card>
|
||||
<card title="OIDC Setup" icon="shield" href="./oidc-provider-setup">
|
||||
Configure SSO authentication providers
|
||||
</card>
|
||||
<card title="Upcoming Features" icon="rocket" href="./upcoming-features">
|
||||
See what's coming next
|
||||
</card>
|
||||
</cards>
|
||||
|
||||
|
||||
## 🌟 Key Features
|
||||
|
||||
:::info[Core Capabilities]
|
||||
The API provides:
|
||||
|
||||
- **GraphQL Interface**: Modern, flexible API with strong typing
|
||||
- **Authentication**: Multiple methods including API keys, session cookies, and SSO/OIDC
|
||||
- **Comprehensive Coverage**: Access to system information, array management, and Docker operations
|
||||
- **Developer Tools**: Built-in GraphQL sandbox configurable via web interface or CLI
|
||||
- **Role-Based Access**: Granular permission control
|
||||
- **Web Management**: Manage API keys and settings through the web interface
|
||||
:::
|
||||
|
||||
## 🚀 Get Started
|
||||
|
||||
<tabs>
|
||||
<tabItem value="v72" label="Unraid OS v7.2+" default>
|
||||
|
||||
1. The API is already installed and running
|
||||
2. Access settings at **Settings** → **Management Access** → **API**
|
||||
3. Enable the GraphQL Sandbox for development
|
||||
4. Create your first API key
|
||||
5. Start making GraphQL queries!
|
||||
|
||||
</tabItem>
|
||||
<tabItem value="older" label="Pre-7.2 Versions">
|
||||
|
||||
1. Install the Unraid Connect plugin from Community Apps
|
||||
2. No Unraid Connect login required for local API access
|
||||
3. Configure the plugin settings
|
||||
4. Enable the GraphQL Sandbox
|
||||
5. Start exploring the API!
|
||||
|
||||
</tabItem>
|
||||
</tabs>
|
||||
|
||||
For detailed usage instructions, see the [CLI Commands](./cli) reference.
|
||||
1
api/docs/public/moved-to-docs-repo.md
Normal file
1
api/docs/public/moved-to-docs-repo.md
Normal file
@@ -0,0 +1 @@
|
||||
# All content here has been permanently moved to [Unraid Docs](https://github.com/unraid/docs)
|
||||
@@ -1,420 +0,0 @@
|
||||
---
|
||||
title: OIDC Provider Setup
|
||||
description: Configure OIDC (OpenID Connect) providers for SSO authentication in Unraid API
|
||||
sidebar_position: 3
|
||||
---
|
||||
|
||||
# OIDC Provider Setup
|
||||
|
||||
:::info[What is OIDC?]
|
||||
OpenID Connect (OIDC) is an authentication protocol that allows users to sign in using their existing accounts from providers like Google, Microsoft, or your corporate identity provider. It enables Single Sign-On (SSO) for seamless and secure authentication.
|
||||
:::
|
||||
|
||||
This guide walks you through configuring OIDC (OpenID Connect) providers for SSO authentication in the Unraid API using the web interface.
|
||||
|
||||
## 🚀 Quick Start
|
||||
|
||||
<details open>
|
||||
<summary><strong>Getting to OIDC Settings</strong></summary>
|
||||
|
||||
1. Navigate to your Unraid server's web interface
|
||||
2. Go to **Settings** → **Management Access** → **API** → **OIDC**
|
||||
3. You'll see tabs for different providers - click the **+** button to add a new provider
|
||||
|
||||
</details>
|
||||
|
||||
### OIDC Providers Interface Overview
|
||||
|
||||

|
||||
*Login page showing traditional login form with SSO options - "Login With Unraid.net" and "Sign in with Google" buttons*
|
||||
|
||||
The interface includes:
|
||||
|
||||
- **Provider tabs**: Each configured provider (Unraid.net, Google, etc.) appears as a tab
|
||||
- **Add Provider button**: Click the **+** button to add new providers
|
||||
- **Authorization Mode dropdown**: Toggle between "simple" and "advanced" modes
|
||||
- **Simple Authorization section**: Configure allowed email domains and specific addresses
|
||||
- **Add Item buttons**: Click to add multiple authorization rules
|
||||
|
||||
## Understanding Authorization Modes
|
||||
|
||||
The interface provides two authorization modes:
|
||||
|
||||
### Simple Mode (Recommended)
|
||||
|
||||
Simple mode is the easiest way to configure authorization. You can:
|
||||
|
||||
- Allow specific email domains (e.g., @company.com)
|
||||
- Allow specific email addresses
|
||||
- Configure who can access your Unraid server with minimal setup
|
||||
|
||||
**When to use Simple Mode:**
|
||||
|
||||
- You want to allow all users from your company domain
|
||||
- You have a small list of specific users
|
||||
- You're new to OIDC configuration
|
||||
|
||||
<details>
|
||||
<summary><strong>Advanced Mode</strong></summary>
|
||||
|
||||
Advanced mode provides granular control using claim-based rules. You can:
|
||||
|
||||
- Create complex authorization rules based on JWT claims
|
||||
- Use operators like equals, contains, endsWith, startsWith
|
||||
- Combine multiple conditions with OR/AND logic
|
||||
- Choose whether ANY rule must pass (OR mode) or ALL rules must pass (AND mode)
|
||||
|
||||
**When to use Advanced Mode:**
|
||||
|
||||
- You need to check group memberships
|
||||
- You want to verify multiple claims (e.g., email domain AND verified status)
|
||||
- You have complex authorization requirements
|
||||
- You need fine-grained control over how rules are evaluated
|
||||
|
||||
</details>
|
||||
|
||||
## Authorization Rules
|
||||
|
||||

|
||||
*Advanced authorization rules showing JWT claim configuration with email endsWith operator for domain-based access control*
|
||||
|
||||
### Simple Mode Examples
|
||||
|
||||
#### Allow Company Domain
|
||||
|
||||
In Simple Authorization:
|
||||
|
||||
- **Allowed Email Domains**: Enter `company.com`
|
||||
- This allows anyone with @company.com email
|
||||
|
||||
#### Allow Specific Users
|
||||
|
||||
- **Specific Email Addresses**: Add individual emails
|
||||
- Click **Add Item** to add multiple addresses
|
||||
|
||||
<details>
|
||||
<summary><strong>Advanced Mode Examples</strong></summary>
|
||||
|
||||
#### Authorization Rule Mode
|
||||
|
||||
When using multiple rules, you can choose how they're evaluated:
|
||||
|
||||
- **OR Mode** (default): User is authorized if ANY rule passes
|
||||
- **AND Mode**: User is authorized only if ALL rules pass
|
||||
|
||||
#### Email Domain with Verification (AND Mode)
|
||||
|
||||
To require both email domain AND verification:
|
||||
|
||||
1. Set **Authorization Rule Mode** to `AND`
|
||||
2. Add two rules:
|
||||
- Rule 1:
|
||||
- **Claim**: `email`
|
||||
- **Operator**: `endsWith`
|
||||
- **Value**: `@company.com`
|
||||
- Rule 2:
|
||||
- **Claim**: `email_verified`
|
||||
- **Operator**: `equals`
|
||||
- **Value**: `true`
|
||||
|
||||
This ensures users must have both a company email AND a verified email address.
|
||||
|
||||
#### Group-Based Access (OR Mode)
|
||||
|
||||
To allow access to multiple groups:
|
||||
|
||||
1. Set **Authorization Rule Mode** to `OR` (default)
|
||||
2. Add rules for each group:
|
||||
- **Claim**: `groups`
|
||||
- **Operator**: `contains`
|
||||
- **Value**: `admins`
|
||||
|
||||
Or add another rule:
|
||||
- **Claim**: `groups`
|
||||
- **Operator**: `contains`
|
||||
- **Value**: `developers`
|
||||
|
||||
Users in either `admins` OR `developers` group will be authorized.
|
||||
|
||||
#### Multiple Domains
|
||||
|
||||
- **Claim**: `email`
|
||||
- **Operator**: `endsWith`
|
||||
- **Values**: Add multiple domains (e.g., `company.com`, `subsidiary.com`)
|
||||
|
||||
#### Complex Authorization (AND Mode)
|
||||
|
||||
For strict security requiring multiple conditions:
|
||||
|
||||
1. Set **Authorization Rule Mode** to `AND`
|
||||
2. Add multiple rules that ALL must pass:
|
||||
- Email must be from company domain
|
||||
- Email must be verified
|
||||
- User must be in specific group
|
||||
- Account must have 2FA enabled (if claim available)
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Configuration Interface Details</strong></summary>
|
||||
|
||||
### Provider Tabs
|
||||
|
||||
- Each configured provider appears as a tab at the top
|
||||
- Click a tab to switch between provider configurations
|
||||
- The **+** button on the right adds a new provider
|
||||
|
||||
### Authorization Mode Dropdown
|
||||
|
||||
- **simple**: Best for email-based authorization (recommended for most users)
|
||||
- **advanced**: For complex claim-based rules using JWT claims
|
||||
|
||||
### Simple Authorization Fields
|
||||
|
||||
When "simple" mode is selected, you'll see:
|
||||
|
||||
- **Allowed Email Domains**: Enter domains without @ (e.g., `company.com`)
|
||||
- Helper text: "Users with emails ending in these domains can login"
|
||||
- **Specific Email Addresses**: Add individual email addresses
|
||||
- Helper text: "Only these exact email addresses can login"
|
||||
- **Add Item** buttons to add multiple entries
|
||||
|
||||
### Advanced Authorization Fields
|
||||
|
||||
When "advanced" mode is selected, you'll see:
|
||||
|
||||
- **Authorization Rule Mode**: Choose `OR` (any rule passes) or `AND` (all rules must pass)
|
||||
- **Authorization Rules**: Add multiple claim-based rules
|
||||
- **For each rule**:
|
||||
- **Claim**: The JWT claim to check
|
||||
- **Operator**: How to compare (equals, contains, endsWith, startsWith)
|
||||
- **Value**: What to match against
|
||||
|
||||
### Additional Interface Elements
|
||||
|
||||
- **Enable Developer Sandbox**: Toggle to enable GraphQL sandbox at `/graphql`
|
||||
- The interface uses a dark theme for better visibility
|
||||
- Field validation indicators help ensure correct configuration
|
||||
|
||||
</details>
|
||||
|
||||
### Required Redirect URI
|
||||
|
||||
:::caution[Important Configuration]
|
||||
All providers must be configured with this exact redirect URI format:
|
||||
:::
|
||||
|
||||
```bash
|
||||
http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback
|
||||
```
|
||||
|
||||
:::tip
|
||||
Replace `YOUR_UNRAID_IP` with your actual server IP address (e.g., `192.168.1.100` or `tower.local`).
|
||||
:::
|
||||
|
||||
### Issuer URL Format
|
||||
|
||||
The **Issuer URL** field accepts both formats, but the **base URL format is strongly recommended** for security:
|
||||
|
||||
- **Base URL** (recommended): `https://accounts.google.com`
|
||||
- **Full discovery URL**: `https://accounts.google.com/.well-known/openid-configuration`
|
||||
|
||||
**⚠️ Security Note**: Always use the base URL format when possible. The system automatically appends `/.well-known/openid-configuration` for OIDC discovery. Using the full discovery URL directly disables important issuer validation checks and is not recommended by the OpenID Connect specification.
|
||||
|
||||
**Examples of correct base URLs:**
|
||||
- Google: `https://accounts.google.com`
|
||||
- Microsoft/Azure: `https://login.microsoftonline.com/YOUR_TENANT_ID/v2.0`
|
||||
- Keycloak: `https://keycloak.example.com/realms/YOUR_REALM`
|
||||
- Authelia: `https://auth.yourdomain.com`
|
||||
|
||||
## ✅ Testing Your Configuration
|
||||
|
||||

|
||||
*Unraid login page displaying both traditional username/password authentication and SSO options with customized provider buttons*
|
||||
|
||||
1. Save your provider configuration
|
||||
2. Log out (if logged in)
|
||||
3. Navigate to the login page
|
||||
4. Your configured provider button should appear
|
||||
5. Click to test the login flow
|
||||
|
||||
## 🔧 Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
#### "Provider not found" error
|
||||
|
||||
- Ensure the Issuer URL is correct
|
||||
- Check that the provider supports OIDC discovery (`/.well-known/openid-configuration`)
|
||||
|
||||
#### "Authorization failed"
|
||||
|
||||
- In Simple Mode: Check email domains are entered correctly (without @)
|
||||
- In Advanced Mode:
|
||||
- Verify claim names match exactly what your provider sends
|
||||
- Check if Authorization Rule Mode is set correctly (OR vs AND)
|
||||
- Ensure all required claims are present in the token
|
||||
- Enable debug logging to see actual claims and rule evaluation
|
||||
|
||||
#### "Invalid redirect URI"
|
||||
|
||||
- Ensure the redirect URI in your provider matches exactly
|
||||
- Include the correct port if using a non-standard configuration
|
||||
- Verify the redirect URI protocol matches your server's configuration (HTTP or HTTPS)
|
||||
|
||||
#### Cannot see login button
|
||||
|
||||
- Check that at least one authorization rule is configured
|
||||
- Verify the provider is enabled/saved
|
||||
|
||||
### Debug Mode
|
||||
|
||||
To troubleshoot issues:
|
||||
|
||||
1. Enable debug logging:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=debug unraid-api start --debug
|
||||
```
|
||||
|
||||
2. Check logs for:
|
||||
|
||||
- Received claims from provider
|
||||
- Authorization rule evaluation
|
||||
- Token validation errors
|
||||
|
||||
## 🔐 Security Best Practices
|
||||
|
||||
1. **Use Simple Mode for authorization** - Prevents overly accepting configurations and reduces misconfiguration risks
|
||||
2. **Be specific with authorization** - Don't use overly broad rules
|
||||
3. **Rotate secrets regularly** - Update client secrets periodically
|
||||
4. **Test thoroughly** - Verify only intended users can access
|
||||
|
||||
## 💡 Need Help?
|
||||
|
||||
- Check provider's OIDC documentation
|
||||
- Review Unraid API logs for detailed error messages
|
||||
- Ensure your provider supports standard OIDC discovery
|
||||
- Verify network connectivity between Unraid and provider
|
||||
|
||||
## 🏢 Provider-Specific Setup
|
||||
|
||||
### Unraid.net Provider
|
||||
|
||||
The Unraid.net provider is built-in and pre-configured. You only need to configure authorization rules in the interface.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: Pre-configured (built-in provider)
|
||||
- **Client ID/Secret**: Pre-configured (built-in provider)
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
:::tip[Redirect URI Protocol]
|
||||
**Match the protocol to your server setup:** Use `http://` if accessing your Unraid server without SSL/TLS (typical for local network access). Use `https://` if you've configured SSL/TLS on your server. Some OIDC providers (like Google) require HTTPS and won't accept HTTP redirect URIs.
|
||||
:::
|
||||
|
||||
Configure authorization rules using Simple Mode (allowed email domains/addresses) or Advanced Mode for complex requirements.
|
||||
|
||||
### Google
|
||||
|
||||
<details>
|
||||
<summary><strong>📋 Setup Steps</strong></summary>
|
||||
|
||||
Set up OAuth 2.0 credentials in [Google Cloud Console](https://console.cloud.google.com/):
|
||||
|
||||
1. Go to **APIs & Services** → **Credentials**
|
||||
2. Click **Create Credentials** → **OAuth client ID**
|
||||
3. Choose **Web application** as the application type
|
||||
4. Add your redirect URI to **Authorized redirect URIs**
|
||||
5. Configure the OAuth consent screen if prompted
|
||||
|
||||
</details>
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://accounts.google.com`
|
||||
- **Client ID/Secret**: From your OAuth 2.0 client credentials
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
:::warning[Google Domain Requirements]
|
||||
**Google requires valid domain names for OAuth redirect URIs.** Local IP addresses and `.local` domains are not accepted. To use Google OAuth with your Unraid server, you'll need:
|
||||
|
||||
- **Option 1: Reverse Proxy** - Set up a reverse proxy (like NGINX Proxy Manager or Traefik) with a valid domain name pointing to your Unraid API
|
||||
- **Option 2: Tailscale** - Use Tailscale to get a valid `*.ts.net` domain that Google will accept
|
||||
- **Option 3: Dynamic DNS** - Use a DDNS service to get a public domain name for your server
|
||||
|
||||
Remember to update your redirect URI in both Google Cloud Console and your Unraid OIDC configuration to use the valid domain.
|
||||
:::
|
||||
|
||||
For Google Workspace domains, use Advanced Mode with the `hd` claim to restrict access to your organization's domain.
|
||||
|
||||
### Authelia
|
||||
|
||||
Configure OIDC client in your Authelia `configuration.yml` with client ID `unraid-api` and generate a hashed secret using the Authelia hash-password command.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://auth.yourdomain.com`
|
||||
- **Client ID**: `unraid-api` (or as configured in Authelia)
|
||||
- **Client Secret**: Your unhashed secret
|
||||
- **Required Scopes**: `openid`, `profile`, `email`, `groups`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
Use Advanced Mode with `groups` claim for group-based authorization.
|
||||
|
||||
### Microsoft/Azure AD
|
||||
|
||||
Register a new app in [Azure Portal](https://portal.azure.com/) under Azure Active Directory → App registrations. Note the Application ID, create a client secret, and note your tenant ID.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://login.microsoftonline.com/YOUR_TENANT_ID/v2.0`
|
||||
- **Client ID**: Your Application (client) ID
|
||||
- **Client Secret**: Generated client secret
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
Authorization rules can be configured in the interface using email domains or advanced claims.
|
||||
|
||||
### Keycloak
|
||||
|
||||
Create a new confidential client in Keycloak Admin Console with `openid-connect` protocol and copy the client secret from the Credentials tab.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://keycloak.example.com/realms/YOUR_REALM`
|
||||
- **Client ID**: `unraid-api` (or as configured in Keycloak)
|
||||
- **Client Secret**: From Keycloak Credentials tab
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
For role-based authorization, use Advanced Mode with `realm_access.roles` or `resource_access` claims.
|
||||
|
||||
### Authentik
|
||||
|
||||
Create a new OAuth2/OpenID Provider in Authentik, then create an Application and link it to the provider.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://authentik.example.com/application/o/<application_slug>/`
|
||||
- **Client ID**: From Authentik provider configuration
|
||||
- **Client Secret**: From Authentik provider configuration
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
Authorization rules can be configured in the interface.
|
||||
|
||||
### Okta
|
||||
|
||||
Create a new OIDC Web Application in Okta Admin Console and assign appropriate users or groups.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://YOUR_DOMAIN.okta.com`
|
||||
- **Client ID**: From Okta application configuration
|
||||
- **Client Secret**: From Okta application configuration
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP/graphql/api/auth/oidc/callback`
|
||||
|
||||
Authorization rules can be configured in the interface using email domains or advanced claims.
|
||||
@@ -1,252 +0,0 @@
|
||||
---
|
||||
title: Programmatic API Key Management
|
||||
description: Create, use, and delete API keys programmatically for automated workflows
|
||||
sidebar_position: 4
|
||||
---
|
||||
|
||||
# Programmatic API Key Management
|
||||
|
||||
This guide explains how to create, use, and delete API keys programmatically using the Unraid API CLI, enabling automated workflows and scripts.
|
||||
|
||||
## Overview
|
||||
|
||||
The `unraid-api apikey` command supports both interactive and non-interactive modes, making it suitable for:
|
||||
|
||||
- Automated deployment scripts
|
||||
- CI/CD pipelines
|
||||
- Temporary access provisioning
|
||||
- Infrastructure as code workflows
|
||||
|
||||
:::tip[Quick Start]
|
||||
Jump to the [Complete Workflow Example](#complete-workflow-example) to see everything in action.
|
||||
:::
|
||||
|
||||
## Creating API Keys Programmatically
|
||||
|
||||
### Basic Creation with JSON Output
|
||||
|
||||
Use the `--json` flag to get machine-readable output:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create --name "workflow key" --roles ADMIN --json
|
||||
```
|
||||
|
||||
**Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"key": "your-generated-api-key-here",
|
||||
"name": "workflow key",
|
||||
"id": "generated-uuid"
|
||||
}
|
||||
```
|
||||
|
||||
### Advanced Creation with Permissions
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create \
|
||||
--name "limited access key" \
|
||||
--permissions "DOCKER:READ_ANY,ARRAY:READ_ANY" \
|
||||
--description "Read-only access for monitoring" \
|
||||
--json
|
||||
```
|
||||
|
||||
### Handling Existing Keys
|
||||
|
||||
If a key with the same name exists, use `--overwrite`:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create --name "existing key" --roles ADMIN --overwrite --json
|
||||
```
|
||||
|
||||
:::warning[Key Replacement]
|
||||
The `--overwrite` flag will permanently replace the existing key. The old key will be immediately invalidated.
|
||||
:::
|
||||
|
||||
## Deleting API Keys Programmatically
|
||||
|
||||
### Non-Interactive Deletion
|
||||
|
||||
Delete a key by name without prompts:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --delete --name "workflow key"
|
||||
```
|
||||
|
||||
**Output:**
|
||||
|
||||
```
|
||||
Successfully deleted 1 API key
|
||||
```
|
||||
|
||||
### JSON Output for Deletion
|
||||
|
||||
Use `--json` flag for machine-readable delete confirmation:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --delete --name "workflow key" --json
|
||||
```
|
||||
|
||||
**Success Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 1,
|
||||
"keys": [
|
||||
{
|
||||
"id": "generated-uuid",
|
||||
"name": "workflow key"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**Error Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 0,
|
||||
"error": "No API key found with name: nonexistent key"
|
||||
}
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
When the specified key doesn't exist:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --delete --name "nonexistent key"
|
||||
# Output: No API keys found to delete
|
||||
```
|
||||
|
||||
**JSON Error Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 0,
|
||||
"message": "No API keys found to delete"
|
||||
}
|
||||
```
|
||||
|
||||
## Complete Workflow Example
|
||||
|
||||
Here's a complete example for temporary access provisioning:
|
||||
|
||||
```bash
|
||||
#!/bin/bash
set -e

# 1. Create temporary API key
echo "Creating temporary API key..."
KEY_DATA=$(unraid-api apikey --create \
  --name "temp deployment key" \
  --roles ADMIN \
  --description "Temporary key for deployment $(date)" \
  --json)

# 2. Register cleanup immediately, BEFORE using the key.
#    With `set -e`, any later failure exits the script; the trap must already
#    be in place or the temporary ADMIN key would be left behind.
trap 'echo "Cleaning up..."; unraid-api apikey --delete --name "temp deployment key"' EXIT

# 3. Extract the API key
API_KEY=$(echo "$KEY_DATA" | jq -r '.key')
echo "API key created successfully"

# 4. Use the key for operations
echo "Configuring services..."
curl -H "Authorization: Bearer $API_KEY" \
  -H "Content-Type: application/json" \
  -d '{"provider": "azure", "clientId": "your-client-id"}' \
  http://localhost:3001/graphql

echo "Deployment completed successfully"
|
||||
```
|
||||
|
||||
## Command Reference
|
||||
|
||||
### Create Command Options
|
||||
|
||||
| Flag | Description | Example |
|
||||
| ----------------------- | ----------------------- | --------------------------------- |
|
||||
| `--name <name>` | Key name (required) | `--name "my key"` |
|
||||
| `--roles <roles>` | Comma-separated roles | `--roles ADMIN,VIEWER` |
|
||||
| `--permissions <perms>` | Resource:action pairs | `--permissions "DOCKER:READ_ANY"` |
|
||||
| `--description <desc>` | Key description | `--description "CI/CD key"` |
|
||||
| `--overwrite` | Replace existing key | `--overwrite` |
|
||||
| `--json` | Machine-readable output | `--json` |
|
||||
|
||||
### Available Roles
|
||||
|
||||
- `ADMIN` - Full system access
|
||||
- `CONNECT` - Unraid Connect features
|
||||
- `VIEWER` - Read-only access
|
||||
- `GUEST` - Limited access
|
||||
|
||||
### Available Resources and Actions
|
||||
|
||||
**Resources:** `ACTIVATION_CODE`, `API_KEY`, `ARRAY`, `CLOUD`, `CONFIG`, `CONNECT`, `CONNECT__REMOTE_ACCESS`, `CUSTOMIZATIONS`, `DASHBOARD`, `DISK`, `DISPLAY`, `DOCKER`, `FLASH`, `INFO`, `LOGS`, `ME`, `NETWORK`, `NOTIFICATIONS`, `ONLINE`, `OS`, `OWNER`, `PERMISSION`, `REGISTRATION`, `SERVERS`, `SERVICES`, `SHARE`, `VARS`, `VMS`, `WELCOME`
|
||||
|
||||
**Actions:** `CREATE_ANY`, `CREATE_OWN`, `READ_ANY`, `READ_OWN`, `UPDATE_ANY`, `UPDATE_OWN`, `DELETE_ANY`, `DELETE_OWN`
|
||||
|
||||
### Delete Command Options
|
||||
|
||||
| Flag | Description | Example |
|
||||
| --------------- | ------------------------ | ----------------- |
|
||||
| `--delete` | Enable delete mode | `--delete` |
|
||||
| `--name <name>` | Key to delete (optional) | `--name "my key"` |
|
||||
|
||||
**Note:** If `--name` is omitted, the command runs interactively.
|
||||
|
||||
## Best Practices
|
||||
|
||||
:::info[Security Best Practices]
|
||||
**Minimal Permissions**
|
||||
|
||||
- Use specific permissions instead of ADMIN role when possible
|
||||
- Example: `--permissions "DOCKER:READ_ANY"` instead of `--roles ADMIN`
|
||||
|
||||
**Key Lifecycle Management**
|
||||
|
||||
- Always clean up temporary keys after use
|
||||
- Store API keys securely (environment variables, secrets management)
|
||||
- Use descriptive names and descriptions for audit trails
|
||||
:::
|
||||
|
||||
### Error Handling
|
||||
|
||||
- Check exit codes (`$?`) after each command
|
||||
- Use `set -e` in bash scripts to fail fast
|
||||
- Implement proper cleanup with `trap`
|
||||
|
||||
### Key Naming
|
||||
|
||||
- Use descriptive names that include purpose and date
|
||||
- Names must contain only letters, numbers, and spaces
|
||||
- Unicode letters are supported
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
:::note[Common Error Messages]
|
||||
|
||||
**"API key name must contain only letters, numbers, and spaces"**
|
||||
|
||||
- **Solution:** Remove special characters like hyphens, underscores, or symbols
|
||||
|
||||
**"API key with name 'x' already exists"**
|
||||
|
||||
- **Solution:** Use `--overwrite` flag or choose a different name
|
||||
|
||||
**"Please add at least one role or permission to the key"**
|
||||
|
||||
- **Solution:** Specify either `--roles` or `--permissions` (or both)
|
||||
|
||||
:::
|
||||
|
||||
### Debug Mode
|
||||
|
||||
For troubleshooting, run with debug logging:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=debug unraid-api apikey --create --name "debug key" --roles ADMIN
|
||||
```
|
||||
@@ -1,172 +0,0 @@
|
||||
---
|
||||
title: Roadmap & Features
|
||||
description: Current status and upcoming features for the Unraid API
|
||||
sidebar_position: 10
|
||||
---
|
||||
|
||||
# Roadmap & Features
|
||||
|
||||
:::info Development Status
|
||||
This roadmap outlines completed and planned features for the Unraid API. Features and timelines may change based on development priorities and community feedback.
|
||||
:::
|
||||
|
||||
## Feature Status Legend
|
||||
|
||||
| Status | Description |
|
||||
|--------|-------------|
|
||||
| ✅ **Done** | Feature is complete and available |
|
||||
| 🚧 **In Progress** | Currently under active development |
|
||||
| 📅 **Planned** | Scheduled for future development |
|
||||
| 💡 **Under Consideration** | Being evaluated for future inclusion |
|
||||
|
||||
## Core Infrastructure
|
||||
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **API Development Environment Improvements** | v4.0.0 |
|
||||
| **Include API in Unraid OS** | Unraid v7.2-beta.1 |
|
||||
| **Separate API from Connect Plugin** | Unraid v7.2-beta.1 |
|
||||
|
||||
### Upcoming Features 📅
|
||||
|
||||
| Feature | Target Timeline |
|
||||
|---------|-----------------|
|
||||
| **Make API Open Source** | Q1 2025 |
|
||||
| **Developer Tools for Plugins** | Q2 2025 |
|
||||
|
||||
## Security & Authentication
|
||||
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **Permissions System Rewrite** | v4.0.0 |
|
||||
| **OIDC/SSO Support** | Unraid v7.2-beta.1 |
|
||||
|
||||
### In Development 🚧
|
||||
|
||||
- **User Interface Component Library** - Enhanced security components for the UI
|
||||
|
||||
## User Interface Improvements
|
||||
|
||||
### Planned Features 📅
|
||||
|
||||
| Feature | Target Timeline | Description |
|
||||
|---------|-----------------|-------------|
|
||||
| **New Settings Pages** | Q2 2025 | Modernized settings interface with improved UX |
|
||||
| **Custom Theme Creator** | Q2-Q3 2025 | Allow users to create and share custom themes |
|
||||
| **New Connect Settings Interface** | Q1 2025 | Redesigned Unraid Connect configuration |
|
||||
|
||||
## Array Management
|
||||
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **Array Status Monitoring** | v4.0.0 |
|
||||
|
||||
### Planned Features 📅
|
||||
|
||||
| Feature | Target Timeline | Description |
|
||||
|---------|-----------------|-------------|
|
||||
| **Storage Pool Creation Interface** | Q2 2025 | Simplified pool creation workflow |
|
||||
| **Storage Pool Status Interface** | Q2 2025 | Real-time pool health monitoring |
|
||||
|
||||
## Docker Integration
|
||||
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **Docker Container Status Monitoring** | v4.0.0 |
|
||||
|
||||
### Planned Features 📅
|
||||
|
||||
| Feature | Target Timeline | Description |
|
||||
|---------|-----------------|-------------|
|
||||
| **New Docker Status Interface Design** | Q3 2025 | Modern container management UI |
|
||||
| **New Docker Status Interface** | Q3 2025 | Implementation of new design |
|
||||
| **Docker Container Setup Interface** | Q3 2025 | Streamlined container deployment |
|
||||
| **Docker Compose Support** | TBD | Native docker-compose.yml support |
|
||||
|
||||
## Share Management
|
||||
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **Array/Cache Share Status Monitoring** | v4.0.0 |
|
||||
|
||||
### Under Consideration 💡
|
||||
|
||||
- **Storage Share Creation & Settings** - Enhanced share configuration options
|
||||
- **Storage Share Management Interface** - Unified share management dashboard
|
||||
|
||||
## Plugin System
|
||||
|
||||
### Planned Features 📅
|
||||
|
||||
| Feature | Target Timeline | Description |
|
||||
|---------|-----------------|-------------|
|
||||
| **New Plugins Interface** | Q3 2025 | Redesigned plugin management UI |
|
||||
| **Plugin Management Interface** | TBD | Advanced plugin configuration |
|
||||
| **Plugin Development Tools** | TBD | SDK and tooling for developers |
|
||||
|
||||
## Notifications
|
||||
|
||||
### Completed Features ✅
|
||||
|
||||
| Feature | Available Since |
|
||||
|---------|-----------------|
|
||||
| **Notifications System** | v4.0.0 |
|
||||
| **Notifications Interface** | v4.0.0 |
|
||||
|
||||
---
|
||||
|
||||
## Recent Releases
|
||||
|
||||
:::info Full Release History
|
||||
For a complete list of all releases, changelogs, and download links, visit the [Unraid API GitHub Releases](https://github.com/unraid/api/releases) page.
|
||||
:::
|
||||
|
||||
### Unraid v7.2-beta.1 Highlights
|
||||
|
||||
- 🎉 **API included in Unraid OS** - Native integration
|
||||
- 🔐 **OIDC/SSO Support** - Enterprise authentication
|
||||
- 📦 **Standalone API** - Separated from Connect plugin
|
||||
|
||||
### v4.0.0 Highlights
|
||||
|
||||
- 🛡️ **Permissions System Rewrite** - Enhanced security
|
||||
- 📊 **Comprehensive Monitoring** - Array, Docker, and Share status
|
||||
- 🔔 **Notifications System** - Real-time alerts and notifications
|
||||
- 🛠️ **Developer Environment** - Improved development tools
|
||||
|
||||
## Community Feedback
|
||||
|
||||
:::tip Have a Feature Request?
|
||||
We value community input! Please submit feature requests and feedback through:
|
||||
|
||||
- [Unraid Forums](https://forums.unraid.net)
|
||||
- [GitHub Issues](https://github.com/unraid/api/issues) - API is open source!
|
||||
|
||||
:::
|
||||
|
||||
## Version Support
|
||||
|
||||
| Unraid Version | API Version | Support Status |
|
||||
|----------------|-------------|----------------|
|
||||
| Unraid v7.2-beta.1+ | Latest | ✅ Active |
|
||||
| 7.0 - 7.1.x | v4.x via Plugin | ⚠️ Limited |
|
||||
| 6.12.x | v4.x via Plugin | ⚠️ Limited |
|
||||
| < 6.12 | Not Supported | ❌ EOL |
|
||||
|
||||
:::warning Legacy Support
|
||||
Versions prior to Unraid 7.2 require the API to be installed through the Unraid Connect plugin. Some features may not be available on older versions.
|
||||
:::
|
||||
|
||||
:::tip Pre-release Versions
|
||||
You can always install the Unraid Connect plugin to access pre-release versions of the API and get early access to new features before they're included in Unraid OS releases.
|
||||
:::
|
||||
@@ -7,7 +7,7 @@
|
||||
"cwd": "/usr/local/unraid-api",
|
||||
"exec_mode": "fork",
|
||||
"wait_ready": true,
|
||||
"listen_timeout": 15000,
|
||||
"listen_timeout": 30000,
|
||||
"max_restarts": 10,
|
||||
"min_uptime": 10000,
|
||||
"watch": false,
|
||||
|
||||
@@ -862,6 +862,38 @@ type DockerMutations {
|
||||
|
||||
"""Stop a container"""
|
||||
stop(id: PrefixedID!): DockerContainer!
|
||||
|
||||
"""Pause (Suspend) a container"""
|
||||
pause(id: PrefixedID!): DockerContainer!
|
||||
|
||||
"""Unpause (Resume) a container"""
|
||||
unpause(id: PrefixedID!): DockerContainer!
|
||||
|
||||
"""Remove a container"""
|
||||
removeContainer(id: PrefixedID!, withImage: Boolean): Boolean!
|
||||
|
||||
"""Update auto-start configuration for Docker containers"""
|
||||
updateAutostartConfiguration(entries: [DockerAutostartEntryInput!]!, persistUserPreferences: Boolean): Boolean!
|
||||
|
||||
"""Update a container to the latest image"""
|
||||
updateContainer(id: PrefixedID!): DockerContainer!
|
||||
|
||||
"""Update multiple containers to the latest images"""
|
||||
updateContainers(ids: [PrefixedID!]!): [DockerContainer!]!
|
||||
|
||||
"""Update all containers that have available updates"""
|
||||
updateAllContainers: [DockerContainer!]!
|
||||
}
|
||||
|
||||
input DockerAutostartEntryInput {
|
||||
"""Docker container identifier"""
|
||||
id: PrefixedID!
|
||||
|
||||
"""Whether the container should auto-start"""
|
||||
autoStart: Boolean!
|
||||
|
||||
"""Number of seconds to wait after starting the container"""
|
||||
wait: Int
|
||||
}
|
||||
|
||||
type VmMutations {
|
||||
@@ -944,6 +976,23 @@ input UpdateApiKeyInput {
|
||||
permissions: [AddPermissionInput!]
|
||||
}
|
||||
|
||||
"""Customization related mutations"""
|
||||
type CustomizationMutations {
|
||||
"""Update the UI theme (writes dynamix.cfg)"""
|
||||
setTheme(
|
||||
"""Theme to apply"""
|
||||
theme: ThemeName!
|
||||
): Theme!
|
||||
}
|
||||
|
||||
"""The theme name"""
|
||||
enum ThemeName {
|
||||
azure
|
||||
black
|
||||
gray
|
||||
white
|
||||
}
|
||||
|
||||
"""
|
||||
Parity check related mutations, WIP, response types and functionality will change
|
||||
"""
|
||||
@@ -1042,14 +1091,6 @@ type Theme {
|
||||
headerSecondaryTextColor: String
|
||||
}
|
||||
|
||||
"""The theme name"""
|
||||
enum ThemeName {
|
||||
azure
|
||||
black
|
||||
gray
|
||||
white
|
||||
}
|
||||
|
||||
type ExplicitStatusItem {
|
||||
name: String!
|
||||
updateStatus: UpdateStatus!
|
||||
@@ -1080,6 +1121,29 @@ enum ContainerPortType {
|
||||
UDP
|
||||
}
|
||||
|
||||
type DockerPortConflictContainer {
|
||||
id: PrefixedID!
|
||||
name: String!
|
||||
}
|
||||
|
||||
type DockerContainerPortConflict {
|
||||
privatePort: Port!
|
||||
type: ContainerPortType!
|
||||
containers: [DockerPortConflictContainer!]!
|
||||
}
|
||||
|
||||
type DockerLanPortConflict {
|
||||
lanIpPort: String!
|
||||
publicPort: Port
|
||||
type: ContainerPortType!
|
||||
containers: [DockerPortConflictContainer!]!
|
||||
}
|
||||
|
||||
type DockerPortConflicts {
|
||||
containerPorts: [DockerContainerPortConflict!]!
|
||||
lanPorts: [DockerLanPortConflict!]!
|
||||
}
|
||||
|
||||
type ContainerHostConfig {
|
||||
networkMode: String!
|
||||
}
|
||||
@@ -1093,8 +1157,17 @@ type DockerContainer implements Node {
|
||||
created: Int!
|
||||
ports: [ContainerPort!]!
|
||||
|
||||
"""List of LAN-accessible host:port values"""
|
||||
lanIpPorts: [String!]
|
||||
|
||||
"""Total size of all files in the container (in bytes)"""
|
||||
sizeRootFs: BigInt
|
||||
|
||||
"""Size of writable layer (in bytes)"""
|
||||
sizeRw: BigInt
|
||||
|
||||
"""Size of container logs (in bytes)"""
|
||||
sizeLog: BigInt
|
||||
labels: JSON
|
||||
state: ContainerState!
|
||||
status: String!
|
||||
@@ -1102,12 +1175,50 @@ type DockerContainer implements Node {
|
||||
networkSettings: JSON
|
||||
mounts: [JSON!]
|
||||
autoStart: Boolean!
|
||||
|
||||
"""Zero-based order in the auto-start list"""
|
||||
autoStartOrder: Int
|
||||
|
||||
"""Wait time in seconds applied after start"""
|
||||
autoStartWait: Int
|
||||
templatePath: String
|
||||
|
||||
"""Project/Product homepage URL"""
|
||||
projectUrl: String
|
||||
|
||||
"""Registry/Docker Hub URL"""
|
||||
registryUrl: String
|
||||
|
||||
"""Support page/thread URL"""
|
||||
supportUrl: String
|
||||
|
||||
"""Icon URL"""
|
||||
iconUrl: String
|
||||
|
||||
"""Resolved WebUI URL from template"""
|
||||
webUiUrl: String
|
||||
|
||||
"""Shell to use for console access (from template)"""
|
||||
shell: String
|
||||
|
||||
"""Port mappings from template (used when container is not running)"""
|
||||
templatePorts: [ContainerPort!]
|
||||
|
||||
"""Whether the container is orphaned (no template found)"""
|
||||
isOrphaned: Boolean!
|
||||
isUpdateAvailable: Boolean
|
||||
isRebuildReady: Boolean
|
||||
|
||||
"""Whether Tailscale is enabled for this container"""
|
||||
tailscaleEnabled: Boolean!
|
||||
|
||||
"""Tailscale status for this container (fetched via docker exec)"""
|
||||
tailscaleStatus(forceRefresh: Boolean = false): TailscaleStatus
|
||||
}
|
||||
|
||||
enum ContainerState {
|
||||
RUNNING
|
||||
PAUSED
|
||||
EXITED
|
||||
}
|
||||
|
||||
@@ -1129,49 +1240,221 @@ type DockerNetwork implements Node {
|
||||
labels: JSON!
|
||||
}
|
||||
|
||||
type DockerContainerLogLine {
|
||||
timestamp: DateTime!
|
||||
message: String!
|
||||
}
|
||||
|
||||
type DockerContainerLogs {
|
||||
containerId: PrefixedID!
|
||||
lines: [DockerContainerLogLine!]!
|
||||
|
||||
"""
|
||||
Cursor that can be passed back through the since argument to continue streaming logs.
|
||||
"""
|
||||
cursor: DateTime
|
||||
}
|
||||
|
||||
type DockerContainerStats {
|
||||
id: PrefixedID!
|
||||
|
||||
"""CPU Usage Percentage"""
|
||||
cpuPercent: Float!
|
||||
|
||||
"""Memory Usage String (e.g. 100MB / 1GB)"""
|
||||
memUsage: String!
|
||||
|
||||
"""Memory Usage Percentage"""
|
||||
memPercent: Float!
|
||||
|
||||
"""Network I/O String (e.g. 100MB / 1GB)"""
|
||||
netIO: String!
|
||||
|
||||
"""Block I/O String (e.g. 100MB / 1GB)"""
|
||||
blockIO: String!
|
||||
}
|
||||
|
||||
"""Tailscale exit node connection status"""
|
||||
type TailscaleExitNodeStatus {
|
||||
"""Whether the exit node is online"""
|
||||
online: Boolean!
|
||||
|
||||
"""Tailscale IPs of the exit node"""
|
||||
tailscaleIps: [String!]
|
||||
}
|
||||
|
||||
"""Tailscale status for a Docker container"""
|
||||
type TailscaleStatus {
|
||||
"""Whether Tailscale is online in the container"""
|
||||
online: Boolean!
|
||||
|
||||
"""Current Tailscale version"""
|
||||
version: String
|
||||
|
||||
"""Latest available Tailscale version"""
|
||||
latestVersion: String
|
||||
|
||||
"""Whether a Tailscale update is available"""
|
||||
updateAvailable: Boolean!
|
||||
|
||||
"""Configured Tailscale hostname"""
|
||||
hostname: String
|
||||
|
||||
"""Actual Tailscale DNS name"""
|
||||
dnsName: String
|
||||
|
||||
"""DERP relay code"""
|
||||
relay: String
|
||||
|
||||
"""DERP relay region name"""
|
||||
relayName: String
|
||||
|
||||
"""Tailscale IPv4 and IPv6 addresses"""
|
||||
tailscaleIps: [String!]
|
||||
|
||||
"""Advertised subnet routes"""
|
||||
primaryRoutes: [String!]
|
||||
|
||||
"""Whether this container is an exit node"""
|
||||
isExitNode: Boolean!
|
||||
|
||||
"""Status of the connected exit node (if using one)"""
|
||||
exitNodeStatus: TailscaleExitNodeStatus
|
||||
|
||||
"""Tailscale Serve/Funnel WebUI URL"""
|
||||
webUiUrl: String
|
||||
|
||||
"""Tailscale key expiry date"""
|
||||
keyExpiry: DateTime
|
||||
|
||||
"""Days until key expires"""
|
||||
keyExpiryDays: Int
|
||||
|
||||
"""Whether the Tailscale key has expired"""
|
||||
keyExpired: Boolean!
|
||||
|
||||
"""Tailscale backend state (Running, NeedsLogin, Stopped, etc.)"""
|
||||
backendState: String
|
||||
|
||||
"""Authentication URL if Tailscale needs login"""
|
||||
authUrl: String
|
||||
}
|
||||
|
||||
type Docker implements Node {
|
||||
id: PrefixedID!
|
||||
containers(skipCache: Boolean! = false): [DockerContainer!]!
|
||||
networks(skipCache: Boolean! = false): [DockerNetwork!]!
|
||||
organizer: ResolvedOrganizerV1!
|
||||
portConflicts(skipCache: Boolean! = false): DockerPortConflicts!
|
||||
|
||||
"""
|
||||
Access container logs. Requires specifying a target container id through resolver arguments.
|
||||
"""
|
||||
logs(id: PrefixedID!, since: DateTime, tail: Int): DockerContainerLogs!
|
||||
container(id: PrefixedID!): DockerContainer
|
||||
organizer(skipCache: Boolean! = false): ResolvedOrganizerV1!
|
||||
containerUpdateStatuses: [ExplicitStatusItem!]!
|
||||
}
|
||||
|
||||
type DockerTemplateSyncResult {
|
||||
scanned: Int!
|
||||
matched: Int!
|
||||
skipped: Int!
|
||||
errors: [String!]!
|
||||
}
|
||||
|
||||
type ResolvedOrganizerView {
|
||||
id: String!
|
||||
name: String!
|
||||
root: ResolvedOrganizerEntry!
|
||||
rootId: String!
|
||||
flatEntries: [FlatOrganizerEntry!]!
|
||||
prefs: JSON
|
||||
}
|
||||
|
||||
union ResolvedOrganizerEntry = ResolvedOrganizerFolder | OrganizerContainerResource | OrganizerResource
|
||||
|
||||
type ResolvedOrganizerFolder {
|
||||
id: String!
|
||||
type: String!
|
||||
name: String!
|
||||
children: [ResolvedOrganizerEntry!]!
|
||||
}
|
||||
|
||||
type OrganizerContainerResource {
|
||||
id: String!
|
||||
type: String!
|
||||
name: String!
|
||||
meta: DockerContainer
|
||||
}
|
||||
|
||||
type OrganizerResource {
|
||||
id: String!
|
||||
type: String!
|
||||
name: String!
|
||||
meta: JSON
|
||||
}
|
||||
|
||||
type ResolvedOrganizerV1 {
|
||||
version: Float!
|
||||
views: [ResolvedOrganizerView!]!
|
||||
}
|
||||
|
||||
type FlatOrganizerEntry {
|
||||
id: String!
|
||||
type: String!
|
||||
name: String!
|
||||
parentId: String
|
||||
depth: Float!
|
||||
position: Float!
|
||||
path: [String!]!
|
||||
hasChildren: Boolean!
|
||||
childrenIds: [String!]!
|
||||
meta: DockerContainer
|
||||
}
|
||||
|
||||
type NotificationCounts {
|
||||
info: Int!
|
||||
warning: Int!
|
||||
alert: Int!
|
||||
total: Int!
|
||||
}
|
||||
|
||||
type NotificationSettings {
|
||||
position: String!
|
||||
expand: Boolean!
|
||||
duration: Int!
|
||||
max: Int!
|
||||
}
|
||||
|
||||
type NotificationOverview {
|
||||
unread: NotificationCounts!
|
||||
archive: NotificationCounts!
|
||||
}
|
||||
|
||||
type Notification implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""Also known as 'event'"""
|
||||
title: String!
|
||||
subject: String!
|
||||
description: String!
|
||||
importance: NotificationImportance!
|
||||
link: String
|
||||
type: NotificationType!
|
||||
|
||||
"""ISO Timestamp for when the notification occurred"""
|
||||
timestamp: String
|
||||
formattedTimestamp: String
|
||||
}
|
||||
|
||||
enum NotificationImportance {
|
||||
ALERT
|
||||
INFO
|
||||
WARNING
|
||||
}
|
||||
|
||||
enum NotificationType {
|
||||
UNREAD
|
||||
ARCHIVE
|
||||
}
|
||||
|
||||
type Notifications implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""A cached overview of the notifications in the system & their severity."""
|
||||
overview: NotificationOverview!
|
||||
list(filter: NotificationFilter!): [Notification!]!
|
||||
|
||||
"""
|
||||
Deduplicated list of unread warning and alert notifications, sorted latest first.
|
||||
"""
|
||||
warningsAndAlerts: [Notification!]!
|
||||
settings: NotificationSettings!
|
||||
}
|
||||
|
||||
input NotificationFilter {
|
||||
importance: NotificationImportance
|
||||
type: NotificationType!
|
||||
offset: Int!
|
||||
limit: Int!
|
||||
}
|
||||
|
||||
type FlashBackupStatus {
|
||||
"""Status message indicating the outcome of the backup initiation."""
|
||||
status: String!
|
||||
@@ -1391,6 +1674,19 @@ type CpuLoad {
|
||||
percentSteal: Float!
|
||||
}
|
||||
|
||||
type CpuPackages implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""Total CPU package power draw (W)"""
|
||||
totalPower: Float!
|
||||
|
||||
"""Power draw per package (W)"""
|
||||
power: [Float!]!
|
||||
|
||||
"""Temperature per package (°C)"""
|
||||
temp: [Float!]!
|
||||
}
|
||||
|
||||
type CpuUtilization implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
@@ -1454,6 +1750,12 @@ type InfoCpu implements Node {
|
||||
|
||||
"""CPU feature flags"""
|
||||
flags: [String!]
|
||||
|
||||
"""
|
||||
Per-package array of core/thread pairs, e.g. [[[0,1],[2,3]], [[4,5],[6,7]]]
|
||||
"""
|
||||
topology: [[[Int!]!]!]!
|
||||
packages: CpuPackages!
|
||||
}
|
||||
|
||||
type MemoryLayout implements Node {
|
||||
@@ -1753,60 +2055,6 @@ type Metrics implements Node {
|
||||
memory: MemoryUtilization
|
||||
}
|
||||
|
||||
type NotificationCounts {
|
||||
info: Int!
|
||||
warning: Int!
|
||||
alert: Int!
|
||||
total: Int!
|
||||
}
|
||||
|
||||
type NotificationOverview {
|
||||
unread: NotificationCounts!
|
||||
archive: NotificationCounts!
|
||||
}
|
||||
|
||||
type Notification implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""Also known as 'event'"""
|
||||
title: String!
|
||||
subject: String!
|
||||
description: String!
|
||||
importance: NotificationImportance!
|
||||
link: String
|
||||
type: NotificationType!
|
||||
|
||||
"""ISO Timestamp for when the notification occurred"""
|
||||
timestamp: String
|
||||
formattedTimestamp: String
|
||||
}
|
||||
|
||||
enum NotificationImportance {
|
||||
ALERT
|
||||
INFO
|
||||
WARNING
|
||||
}
|
||||
|
||||
enum NotificationType {
|
||||
UNREAD
|
||||
ARCHIVE
|
||||
}
|
||||
|
||||
type Notifications implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""A cached overview of the notifications in the system & their severity."""
|
||||
overview: NotificationOverview!
|
||||
list(filter: NotificationFilter!): [Notification!]!
|
||||
}
|
||||
|
||||
input NotificationFilter {
|
||||
importance: NotificationImportance
|
||||
type: NotificationType!
|
||||
offset: Int!
|
||||
limit: Int!
|
||||
}
|
||||
|
||||
type Owner {
|
||||
username: String!
|
||||
url: String!
|
||||
@@ -2416,6 +2664,11 @@ type Mutation {
|
||||
"""Marks a notification as archived."""
|
||||
archiveNotification(id: PrefixedID!): Notification!
|
||||
archiveNotifications(ids: [PrefixedID!]!): NotificationOverview!
|
||||
|
||||
"""
|
||||
Creates a notification if an equivalent unread notification does not already exist.
|
||||
"""
|
||||
notifyIfUnique(input: NotificationData!): Notification
|
||||
archiveAll(importance: NotificationImportance): NotificationOverview!
|
||||
|
||||
"""Marks a notification as unread."""
|
||||
@@ -2430,11 +2683,22 @@ type Mutation {
|
||||
vm: VmMutations!
|
||||
parityCheck: ParityCheckMutations!
|
||||
apiKey: ApiKeyMutations!
|
||||
customization: CustomizationMutations!
|
||||
rclone: RCloneMutations!
|
||||
createDockerFolder(name: String!, parentId: String, childrenIds: [String!]): ResolvedOrganizerV1!
|
||||
setDockerFolderChildren(folderId: String, childrenIds: [String!]!): ResolvedOrganizerV1!
|
||||
deleteDockerEntries(entryIds: [String!]!): ResolvedOrganizerV1!
|
||||
moveDockerEntriesToFolder(sourceEntryIds: [String!]!, destinationFolderId: String!): ResolvedOrganizerV1!
|
||||
moveDockerItemsToPosition(sourceEntryIds: [String!]!, destinationFolderId: String!, position: Float!): ResolvedOrganizerV1!
|
||||
renameDockerFolder(folderId: String!, newName: String!): ResolvedOrganizerV1!
|
||||
createDockerFolderWithItems(name: String!, parentId: String, sourceEntryIds: [String!], position: Float): ResolvedOrganizerV1!
|
||||
updateDockerViewPreferences(viewId: String = "default", prefs: JSON!): ResolvedOrganizerV1!
|
||||
syncDockerTemplatePaths: DockerTemplateSyncResult!
|
||||
|
||||
"""
|
||||
Reset Docker template mappings to defaults. Use this to recover from corrupted state.
|
||||
"""
|
||||
resetDockerTemplateMappings: Boolean!
|
||||
refreshDockerDigests: Boolean!
|
||||
|
||||
"""Initiates a flash drive backup using a configured remote."""
|
||||
@@ -2636,12 +2900,15 @@ input AccessUrlInput {
|
||||
type Subscription {
|
||||
notificationAdded: Notification!
|
||||
notificationsOverview: NotificationOverview!
|
||||
notificationsWarningsAndAlerts: [Notification!]!
|
||||
ownerSubscription: Owner!
|
||||
serversSubscription: Server!
|
||||
parityHistorySubscription: ParityCheck!
|
||||
arraySubscription: UnraidArray!
|
||||
dockerContainerStats: DockerContainerStats!
|
||||
logFile(path: String!): LogFileContent!
|
||||
systemMetricsCpu: CpuUtilization!
|
||||
systemMetricsCpuTelemetry: CpuPackages!
|
||||
systemMetricsMemory: MemoryUtilization!
|
||||
upsUpdates: UPSDevice!
|
||||
}
|
||||
@@ -12,8 +12,13 @@ default:
|
||||
@deploy remote:
|
||||
./scripts/deploy-dev.sh {{remote}}
|
||||
|
||||
# watches typescript files and restarts dev server on changes
|
||||
@watch:
|
||||
watchexec -e ts -r -- pnpm dev
|
||||
|
||||
alias b := build
|
||||
alias d := deploy
|
||||
alias w := watch
|
||||
|
||||
sync-env server:
|
||||
rsync -avz --progress --stats -e ssh .env* root@{{server}}:/usr/local/unraid-api
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@unraid/api",
|
||||
"version": "4.23.0",
|
||||
"version": "4.29.2",
|
||||
"main": "src/cli/index.ts",
|
||||
"type": "module",
|
||||
"corepack": {
|
||||
@@ -30,6 +30,8 @@
|
||||
"// GraphQL Codegen": "",
|
||||
"codegen": "graphql-codegen --config codegen.ts",
|
||||
"codegen:watch": "graphql-codegen --config codegen.ts --watch",
|
||||
"// Internationalization": "",
|
||||
"i18n:extract": "node ./scripts/extract-translations.mjs",
|
||||
"// Code Quality": "",
|
||||
"lint": "eslint --config .eslintrc.ts src/",
|
||||
"lint:fix": "eslint --fix --config .eslintrc.ts src/",
|
||||
@@ -102,6 +104,7 @@
|
||||
"escape-html": "1.0.3",
|
||||
"execa": "9.6.0",
|
||||
"exit-hook": "4.0.0",
|
||||
"fast-xml-parser": "^5.3.0",
|
||||
"fastify": "5.5.0",
|
||||
"filenamify": "7.0.0",
|
||||
"fs-extra": "11.3.1",
|
||||
@@ -114,6 +117,7 @@
|
||||
"graphql-subscriptions": "3.0.0",
|
||||
"graphql-tag": "2.12.6",
|
||||
"graphql-ws": "6.0.6",
|
||||
"html-entities": "^2.6.0",
|
||||
"ini": "5.0.0",
|
||||
"ip": "2.0.1",
|
||||
"jose": "6.0.13",
|
||||
@@ -190,7 +194,7 @@
|
||||
"@types/stoppable": "1.1.3",
|
||||
"@types/strftime": "0.9.8",
|
||||
"@types/supertest": "6.0.3",
|
||||
"@types/uuid": "10.0.0",
|
||||
"@types/uuid": "11.0.0",
|
||||
"@types/ws": "8.18.1",
|
||||
"@types/wtfnode": "0.10.0",
|
||||
"@vitest/coverage-v8": "3.2.4",
|
||||
|
||||
@@ -7,7 +7,7 @@ import { exit } from 'process';
|
||||
import type { PackageJson } from 'type-fest';
|
||||
import { $, cd } from 'zx';
|
||||
|
||||
import { getDeploymentVersion } from './get-deployment-version.js';
|
||||
import { getDeploymentVersion } from '@app/../scripts/get-deployment-version.js';
|
||||
|
||||
type ApiPackageJson = PackageJson & {
|
||||
version: string;
|
||||
@@ -83,6 +83,10 @@ try {
|
||||
if (parsedPackageJson.dependencies?.[dep]) {
|
||||
delete parsedPackageJson.dependencies[dep];
|
||||
}
|
||||
// Also strip from peerDependencies (npm doesn't understand workspace: protocol)
|
||||
if (parsedPackageJson.peerDependencies?.[dep]) {
|
||||
delete parsedPackageJson.peerDependencies[dep];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
162
api/scripts/extract-translations.mjs
Normal file
162
api/scripts/extract-translations.mjs
Normal file
@@ -0,0 +1,162 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { readFile, writeFile } from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
import { glob } from 'glob';
|
||||
import ts from 'typescript';
|
||||
|
||||
const projectRoot = process.cwd();
|
||||
const sourcePatterns = 'src/**/*.{ts,js}';
|
||||
const ignorePatterns = [
|
||||
'**/__tests__/**',
|
||||
'**/__test__/**',
|
||||
'**/*.spec.ts',
|
||||
'**/*.spec.js',
|
||||
'**/*.test.ts',
|
||||
'**/*.test.js',
|
||||
];
|
||||
|
||||
const englishLocaleFile = path.resolve(projectRoot, 'src/i18n/en.json');
|
||||
|
||||
const identifierTargets = new Set(['t', 'translate']);
|
||||
const propertyTargets = new Set([
|
||||
'i18n.t',
|
||||
'i18n.translate',
|
||||
'ctx.t',
|
||||
'this.translate',
|
||||
'this.i18n.translate',
|
||||
'this.i18n.t',
|
||||
]);
|
||||
|
||||
function getPropertyChain(node) {
|
||||
if (ts.isIdentifier(node)) {
|
||||
return node.text;
|
||||
}
|
||||
if (ts.isPropertyAccessExpression(node)) {
|
||||
const left = getPropertyChain(node.expression);
|
||||
if (!left) return undefined;
|
||||
return `${left}.${node.name.text}`;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function extractLiteral(node) {
|
||||
if (ts.isStringLiteralLike(node)) {
|
||||
return node.text;
|
||||
}
|
||||
if (ts.isNoSubstitutionTemplateLiteral(node)) {
|
||||
return node.text;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function collectKeysFromSource(sourceFile) {
|
||||
const keys = new Set();
|
||||
|
||||
function visit(node) {
|
||||
if (ts.isCallExpression(node)) {
|
||||
const expr = node.expression;
|
||||
let matches = false;
|
||||
|
||||
if (ts.isIdentifier(expr) && identifierTargets.has(expr.text)) {
|
||||
matches = true;
|
||||
} else if (ts.isPropertyAccessExpression(expr)) {
|
||||
const chain = getPropertyChain(expr);
|
||||
if (chain && propertyTargets.has(chain)) {
|
||||
matches = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (matches) {
|
||||
const [firstArg] = node.arguments;
|
||||
if (firstArg) {
|
||||
const literal = extractLiteral(firstArg);
|
||||
if (literal) {
|
||||
keys.add(literal);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ts.forEachChild(node, visit);
|
||||
}
|
||||
|
||||
visit(sourceFile);
|
||||
return keys;
|
||||
}
|
||||
|
||||
async function loadEnglishCatalog() {
|
||||
try {
|
||||
const raw = await readFile(englishLocaleFile, 'utf8');
|
||||
const parsed = raw.trim() ? JSON.parse(raw) : {};
|
||||
if (typeof parsed !== 'object' || Array.isArray(parsed)) {
|
||||
throw new Error('English locale file must contain a JSON object.');
|
||||
}
|
||||
return parsed;
|
||||
} catch (error) {
|
||||
if (error && error.code === 'ENOENT') {
|
||||
return {};
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async function ensureEnglishCatalog(keys) {
|
||||
const existingCatalog = await loadEnglishCatalog();
|
||||
const existingKeys = new Set(Object.keys(existingCatalog));
|
||||
|
||||
let added = 0;
|
||||
const combinedKeys = new Set([...existingKeys, ...keys]);
|
||||
const sortedKeys = Array.from(combinedKeys).sort((a, b) => a.localeCompare(b));
|
||||
const nextCatalog = {};
|
||||
|
||||
for (const key of sortedKeys) {
|
||||
if (Object.prototype.hasOwnProperty.call(existingCatalog, key)) {
|
||||
nextCatalog[key] = existingCatalog[key];
|
||||
} else {
|
||||
nextCatalog[key] = key;
|
||||
added += 1;
|
||||
}
|
||||
}
|
||||
|
||||
const nextJson = `${JSON.stringify(nextCatalog, null, 2)}\n`;
|
||||
const existingJson = JSON.stringify(existingCatalog, null, 2) + '\n';
|
||||
|
||||
if (nextJson !== existingJson) {
|
||||
await writeFile(englishLocaleFile, nextJson, 'utf8');
|
||||
}
|
||||
|
||||
return added;
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const files = await glob(sourcePatterns, {
|
||||
cwd: projectRoot,
|
||||
ignore: ignorePatterns,
|
||||
absolute: true,
|
||||
});
|
||||
|
||||
const collectedKeys = new Set();
|
||||
|
||||
await Promise.all(
|
||||
files.map(async (file) => {
|
||||
const content = await readFile(file, 'utf8');
|
||||
const sourceFile = ts.createSourceFile(file, content, ts.ScriptTarget.Latest, true);
|
||||
const keys = collectKeysFromSource(sourceFile);
|
||||
keys.forEach((key) => collectedKeys.add(key));
|
||||
}),
|
||||
);
|
||||
|
||||
const added = await ensureEnglishCatalog(collectedKeys);
|
||||
|
||||
if (added === 0) {
|
||||
console.log('[i18n] No new backend translation keys detected.');
|
||||
} else {
|
||||
console.log(`[i18n] Added ${added} key(s) to src/i18n/en.json.`);
|
||||
}
|
||||
}
|
||||
|
||||
main().catch((error) => {
|
||||
console.error('[i18n] Failed to extract backend translations.', error);
|
||||
process.exitCode = 1;
|
||||
});
|
||||
@@ -4,23 +4,18 @@ import {
|
||||
getBannerPathIfPresent,
|
||||
getCasePathIfPresent,
|
||||
} from '@app/core/utils/images/image-file-helpers.js';
|
||||
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { loadDynamixConfig } from '@app/store/index.js';
|
||||
|
||||
test('get case path returns expected result', async () => {
|
||||
await expect(getCasePathIfPresent()).resolves.toContain('/dev/dynamix/case-model.png');
|
||||
});
|
||||
|
||||
test('get banner path returns null (state unloaded)', async () => {
|
||||
await expect(getBannerPathIfPresent()).resolves.toMatchInlineSnapshot('null');
|
||||
});
|
||||
|
||||
test('get banner path returns the banner (state loaded)', async () => {
|
||||
await store.dispatch(loadDynamixConfigFile()).unwrap();
|
||||
loadDynamixConfig();
|
||||
await expect(getBannerPathIfPresent()).resolves.toContain('/dev/dynamix/banner.png');
|
||||
});
|
||||
|
||||
test('get banner path returns null when no banner (state loaded)', async () => {
|
||||
await store.dispatch(loadDynamixConfigFile()).unwrap();
|
||||
loadDynamixConfig();
|
||||
await expect(getBannerPathIfPresent('notabanner.png')).resolves.toMatchInlineSnapshot('null');
|
||||
});
|
||||
|
||||
178
api/src/__test__/core/utils/parsers/ini-boolean-parser.test.ts
Normal file
178
api/src/__test__/core/utils/parsers/ini-boolean-parser.test.ts
Normal file
@@ -0,0 +1,178 @@
|
||||
import { describe, expect, test } from 'vitest';
|
||||
|
||||
import {
|
||||
iniBooleanOrAutoToJsBoolean,
|
||||
iniBooleanToJsBoolean,
|
||||
} from '@app/core/utils/parsers/ini-boolean-parser.js';
|
||||
|
||||
describe('iniBooleanToJsBoolean', () => {
|
||||
describe('valid boolean values', () => {
|
||||
test('returns false for "no"', () => {
|
||||
expect(iniBooleanToJsBoolean('no')).toBe(false);
|
||||
});
|
||||
|
||||
test('returns false for "false"', () => {
|
||||
expect(iniBooleanToJsBoolean('false')).toBe(false);
|
||||
});
|
||||
|
||||
test('returns true for "yes"', () => {
|
||||
expect(iniBooleanToJsBoolean('yes')).toBe(true);
|
||||
});
|
||||
|
||||
test('returns true for "true"', () => {
|
||||
expect(iniBooleanToJsBoolean('true')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('malformed values', () => {
|
||||
test('handles "no*" as false', () => {
|
||||
expect(iniBooleanToJsBoolean('no*')).toBe(false);
|
||||
});
|
||||
|
||||
test('handles "yes*" as true', () => {
|
||||
expect(iniBooleanToJsBoolean('yes*')).toBe(true);
|
||||
});
|
||||
|
||||
test('handles "true*" as true', () => {
|
||||
expect(iniBooleanToJsBoolean('true*')).toBe(true);
|
||||
});
|
||||
|
||||
test('handles "false*" as false', () => {
|
||||
expect(iniBooleanToJsBoolean('false*')).toBe(false);
|
||||
});
|
||||
|
||||
test('returns undefined for "n0!" (cleans to "n" which is invalid)', () => {
|
||||
expect(iniBooleanToJsBoolean('n0!')).toBe(undefined);
|
||||
});
|
||||
|
||||
test('returns undefined for "y3s!" (cleans to "ys" which is invalid)', () => {
|
||||
expect(iniBooleanToJsBoolean('y3s!')).toBe(undefined);
|
||||
});
|
||||
|
||||
test('handles mixed case with extra chars "YES*" as true', () => {
|
||||
expect(iniBooleanToJsBoolean('YES*')).toBe(true);
|
||||
});
|
||||
|
||||
test('handles mixed case with extra chars "NO*" as false', () => {
|
||||
expect(iniBooleanToJsBoolean('NO*')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('default values', () => {
|
||||
test('returns default value for invalid input when provided', () => {
|
||||
expect(iniBooleanToJsBoolean('invalid', true)).toBe(true);
|
||||
expect(iniBooleanToJsBoolean('invalid', false)).toBe(false);
|
||||
});
|
||||
|
||||
test('returns default value for empty string when provided', () => {
|
||||
expect(iniBooleanToJsBoolean('', true)).toBe(true);
|
||||
expect(iniBooleanToJsBoolean('', false)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('undefined fallback cases', () => {
|
||||
test('returns undefined for invalid input without default', () => {
|
||||
expect(iniBooleanToJsBoolean('invalid')).toBe(undefined);
|
||||
});
|
||||
|
||||
test('returns undefined for empty string without default', () => {
|
||||
expect(iniBooleanToJsBoolean('')).toBe(undefined);
|
||||
});
|
||||
|
||||
test('returns undefined for numeric string without default', () => {
|
||||
expect(iniBooleanToJsBoolean('123')).toBe(undefined);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('iniBooleanOrAutoToJsBoolean', () => {
|
||||
describe('valid boolean values', () => {
|
||||
test('returns false for "no"', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('no')).toBe(false);
|
||||
});
|
||||
|
||||
test('returns false for "false"', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('false')).toBe(false);
|
||||
});
|
||||
|
||||
test('returns true for "yes"', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('yes')).toBe(true);
|
||||
});
|
||||
|
||||
test('returns true for "true"', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('true')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('auto value', () => {
|
||||
test('returns null for "auto"', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('auto')).toBe(null);
|
||||
});
|
||||
});
|
||||
|
||||
describe('malformed values', () => {
|
||||
test('handles "no*" as false', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('no*')).toBe(false);
|
||||
});
|
||||
|
||||
test('handles "yes*" as true', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('yes*')).toBe(true);
|
||||
});
|
||||
|
||||
test('handles "auto*" as null', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('auto*')).toBe(null);
|
||||
});
|
||||
|
||||
test('handles "true*" as true', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('true*')).toBe(true);
|
||||
});
|
||||
|
||||
test('handles "false*" as false', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('false*')).toBe(false);
|
||||
});
|
||||
|
||||
test('handles "n0!" as undefined fallback (cleans to "n" which is invalid)', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('n0!')).toBe(undefined);
|
||||
});
|
||||
|
||||
test('handles "a1ut2o!" as null (removes non-alphabetic chars)', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('a1ut2o!')).toBe(null);
|
||||
});
|
||||
|
||||
test('handles mixed case "AUTO*" as null', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('AUTO*')).toBe(null);
|
||||
});
|
||||
});
|
||||
|
||||
describe('fallback behavior', () => {
|
||||
test('returns undefined for completely invalid input', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('invalid123')).toBe(undefined);
|
||||
});
|
||||
|
||||
test('returns undefined for empty string', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('')).toBe(undefined);
|
||||
});
|
||||
|
||||
test('returns undefined for numeric string', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('123')).toBe(undefined);
|
||||
});
|
||||
|
||||
test('returns undefined for special characters only', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean('!@#$')).toBe(undefined);
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
test('handles undefined gracefully', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean(undefined as any)).toBe(undefined);
|
||||
});
|
||||
|
||||
test('handles null gracefully', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean(null as any)).toBe(undefined);
|
||||
});
|
||||
|
||||
test('handles non-string input gracefully', () => {
|
||||
expect(iniBooleanOrAutoToJsBoolean(123 as any)).toBe(undefined);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -6,6 +6,7 @@ exports[`Returns paths 1`] = `
|
||||
"unraid-api-base",
|
||||
"unraid-data",
|
||||
"docker-autostart",
|
||||
"docker-userprefs",
|
||||
"docker-socket",
|
||||
"rclone-socket",
|
||||
"parity-checks",
|
||||
|
||||
@@ -11,6 +11,7 @@ test('Returns paths', async () => {
|
||||
'unraid-api-base': '/usr/local/unraid-api/',
|
||||
'unraid-data': expect.stringContaining('api/dev/data'),
|
||||
'docker-autostart': '/var/lib/docker/unraid-autostart',
|
||||
'docker-userprefs': '/boot/config/plugins/dockerMan/userprefs.cfg',
|
||||
'docker-socket': '/var/run/docker.sock',
|
||||
'parity-checks': expect.stringContaining('api/dev/states/parity-checks.log'),
|
||||
htpasswd: '/etc/nginx/htpasswd',
|
||||
|
||||
151
api/src/__test__/store/watch/registration-watch.test.ts
Normal file
151
api/src/__test__/store/watch/registration-watch.test.ts
Normal file
@@ -0,0 +1,151 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { StateFileKey } from '@app/store/types.js';
|
||||
import { RegistrationType } from '@app/unraid-api/graph/resolvers/registration/registration.model.js';
|
||||
|
||||
// Mock the store module
|
||||
vi.mock('@app/store/index.js', () => ({
|
||||
store: {
|
||||
dispatch: vi.fn(),
|
||||
},
|
||||
getters: {
|
||||
emhttp: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// Mock the emhttp module
|
||||
vi.mock('@app/store/modules/emhttp.js', () => ({
|
||||
loadSingleStateFile: vi.fn((key) => ({ type: 'emhttp/load-single-state-file', payload: key })),
|
||||
}));
|
||||
|
||||
// Mock the registration module
|
||||
vi.mock('@app/store/modules/registration.js', () => ({
|
||||
loadRegistrationKey: vi.fn(() => ({ type: 'registration/load-registration-key' })),
|
||||
}));
|
||||
|
||||
// Mock the logger
|
||||
vi.mock('@app/core/log.js', () => ({
|
||||
keyServerLogger: {
|
||||
info: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
describe('reloadVarIniWithRetry', () => {
|
||||
let store: { dispatch: ReturnType<typeof vi.fn> };
|
||||
let getters: { emhttp: ReturnType<typeof vi.fn> };
|
||||
let loadSingleStateFile: ReturnType<typeof vi.fn>;
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.useFakeTimers();
|
||||
|
||||
const storeModule = await import('@app/store/index.js');
|
||||
const emhttpModule = await import('@app/store/modules/emhttp.js');
|
||||
|
||||
store = storeModule.store as unknown as typeof store;
|
||||
getters = storeModule.getters as unknown as typeof getters;
|
||||
loadSingleStateFile = emhttpModule.loadSingleStateFile as unknown as typeof loadSingleStateFile;
|
||||
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
it('returns early when registration state changes on first retry', async () => {
|
||||
// Initial state is TRIAL
|
||||
getters.emhttp
|
||||
.mockReturnValueOnce({ var: { regTy: RegistrationType.TRIAL } }) // First call (beforeState)
|
||||
.mockReturnValueOnce({ var: { regTy: RegistrationType.UNLEASHED } }); // After first reload
|
||||
|
||||
const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');
|
||||
|
||||
const promise = reloadVarIniWithRetry();
|
||||
|
||||
// Advance past the first delay (500ms)
|
||||
await vi.advanceTimersByTimeAsync(500);
|
||||
await promise;
|
||||
|
||||
// Should only dispatch once since state changed
|
||||
expect(store.dispatch).toHaveBeenCalledTimes(1);
|
||||
expect(loadSingleStateFile).toHaveBeenCalledWith(StateFileKey.var);
|
||||
});
|
||||
|
||||
it('retries up to maxRetries when state does not change', async () => {
|
||||
// State never changes
|
||||
getters.emhttp.mockReturnValue({ var: { regTy: RegistrationType.TRIAL } });
|
||||
|
||||
const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');
|
||||
|
||||
const promise = reloadVarIniWithRetry(3);
|
||||
|
||||
// Advance through all retries: 500ms, 1000ms, 2000ms
|
||||
await vi.advanceTimersByTimeAsync(500);
|
||||
await vi.advanceTimersByTimeAsync(1000);
|
||||
await vi.advanceTimersByTimeAsync(2000);
|
||||
await promise;
|
||||
|
||||
// Should dispatch 3 times (maxRetries)
|
||||
expect(store.dispatch).toHaveBeenCalledTimes(3);
|
||||
});
|
||||
|
||||
it('stops retrying when state changes on second attempt', async () => {
|
||||
getters.emhttp
|
||||
.mockReturnValueOnce({ var: { regTy: RegistrationType.TRIAL } }) // beforeState
|
||||
.mockReturnValueOnce({ var: { regTy: RegistrationType.TRIAL } }) // After first reload (no change)
|
||||
.mockReturnValueOnce({ var: { regTy: RegistrationType.UNLEASHED } }); // After second reload (changed!)
|
||||
|
||||
const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');
|
||||
|
||||
const promise = reloadVarIniWithRetry(3);
|
||||
|
||||
// First retry
|
||||
await vi.advanceTimersByTimeAsync(500);
|
||||
// Second retry
|
||||
await vi.advanceTimersByTimeAsync(1000);
|
||||
await promise;
|
||||
|
||||
// Should dispatch twice - stopped after state changed
|
||||
expect(store.dispatch).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('handles undefined regTy gracefully', async () => {
|
||||
getters.emhttp.mockReturnValue({ var: {} });
|
||||
|
||||
const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');
|
||||
|
||||
const promise = reloadVarIniWithRetry(1);
|
||||
|
||||
await vi.advanceTimersByTimeAsync(500);
|
||||
await promise;
|
||||
|
||||
// Should still dispatch even with undefined regTy
|
||||
expect(store.dispatch).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('uses exponential backoff delays', async () => {
|
||||
getters.emhttp.mockReturnValue({ var: { regTy: RegistrationType.TRIAL } });
|
||||
|
||||
const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');
|
||||
|
||||
const promise = reloadVarIniWithRetry(3);
|
||||
|
||||
// At 0ms, no dispatch yet
|
||||
expect(store.dispatch).toHaveBeenCalledTimes(0);
|
||||
|
||||
// At 500ms, first dispatch
|
||||
await vi.advanceTimersByTimeAsync(500);
|
||||
expect(store.dispatch).toHaveBeenCalledTimes(1);
|
||||
|
||||
// At 1500ms (500 + 1000), second dispatch
|
||||
await vi.advanceTimersByTimeAsync(1000);
|
||||
expect(store.dispatch).toHaveBeenCalledTimes(2);
|
||||
|
||||
// At 3500ms (500 + 1000 + 2000), third dispatch
|
||||
await vi.advanceTimersByTimeAsync(2000);
|
||||
expect(store.dispatch).toHaveBeenCalledTimes(3);
|
||||
|
||||
await promise;
|
||||
});
|
||||
});
|
||||
234
api/src/common/compare-semver-version.spec.ts
Normal file
234
api/src/common/compare-semver-version.spec.ts
Normal file
@@ -0,0 +1,234 @@
|
||||
import { eq, gt, gte, lt, lte, parse } from 'semver';
|
||||
import { describe, expect, it } from 'vitest';
|
||||
|
||||
import { compareVersions } from '@app/common/compare-semver-version.js';
|
||||
|
||||
describe('compareVersions', () => {
|
||||
describe('basic comparisons', () => {
|
||||
it('should return true when current version is greater than compared (gte)', () => {
|
||||
const current = parse('7.3.0')!;
|
||||
const compared = parse('7.2.0')!;
|
||||
expect(compareVersions(current, compared, gte)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true when current version equals compared (gte)', () => {
|
||||
const current = parse('7.2.0')!;
|
||||
const compared = parse('7.2.0')!;
|
||||
expect(compareVersions(current, compared, gte)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when current version is less than compared (gte)', () => {
|
||||
const current = parse('7.1.0')!;
|
||||
const compared = parse('7.2.0')!;
|
||||
expect(compareVersions(current, compared, gte)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true when current version is less than compared (lte)', () => {
|
||||
const current = parse('7.1.0')!;
|
||||
const compared = parse('7.2.0')!;
|
||||
expect(compareVersions(current, compared, lte)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true when current version equals compared (lte)', () => {
|
||||
const current = parse('7.2.0')!;
|
||||
const compared = parse('7.2.0')!;
|
||||
expect(compareVersions(current, compared, lte)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when current version is greater than compared (lte)', () => {
|
||||
const current = parse('7.3.0')!;
|
||||
const compared = parse('7.2.0')!;
|
||||
expect(compareVersions(current, compared, lte)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true when current version is greater than compared (gt)', () => {
|
||||
const current = parse('7.3.0')!;
|
||||
const compared = parse('7.2.0')!;
|
||||
expect(compareVersions(current, compared, gt)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when current version equals compared (gt)', () => {
|
||||
const current = parse('7.2.0')!;
|
||||
const compared = parse('7.2.0')!;
|
||||
expect(compareVersions(current, compared, gt)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true when current version is less than compared (lt)', () => {
|
||||
const current = parse('7.1.0')!;
|
||||
const compared = parse('7.2.0')!;
|
||||
expect(compareVersions(current, compared, lt)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when current version equals compared (lt)', () => {
|
||||
const current = parse('7.2.0')!;
|
||||
const compared = parse('7.2.0')!;
|
||||
expect(compareVersions(current, compared, lt)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true when versions are equal (eq)', () => {
|
||||
const current = parse('7.2.0')!;
|
||||
const compared = parse('7.2.0')!;
|
||||
expect(compareVersions(current, compared, eq)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when versions are not equal (eq)', () => {
|
||||
const current = parse('7.3.0')!;
|
||||
const compared = parse('7.2.0')!;
|
||||
expect(compareVersions(current, compared, eq)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
// Special-case matrix: when base versions match and only `current` carries a
// prerelease tag, gte/gt report true and lte/lt report false.
describe('prerelease handling - current has prerelease, compared is stable', () => {
    it('should return true for gte when current prerelease > stable (same base)', () => {
        const current = parse('7.2.0-beta.1')!;
        const compared = parse('7.2.0')!;
        expect(compareVersions(current, compared, gte)).toBe(true);
    });

    it('should return true for gt when current prerelease > stable (same base)', () => {
        const current = parse('7.2.0-beta.1')!;
        const compared = parse('7.2.0')!;
        expect(compareVersions(current, compared, gt)).toBe(true);
    });

    it('should return false for lte when current prerelease < stable (same base)', () => {
        const current = parse('7.2.0-beta.1')!;
        const compared = parse('7.2.0')!;
        expect(compareVersions(current, compared, lte)).toBe(false);
    });

    it('should return false for lt when current prerelease < stable (same base)', () => {
        const current = parse('7.2.0-beta.1')!;
        const compared = parse('7.2.0')!;
        expect(compareVersions(current, compared, lt)).toBe(false);
    });

    it('should return false for eq when current prerelease != stable (same base)', () => {
        const current = parse('7.2.0-beta.1')!;
        const compared = parse('7.2.0')!;
        expect(compareVersions(current, compared, eq)).toBe(false);
    });
});

// When only `compared` has a prerelease tag, the special case does not apply,
// so plain semver ordering is used (stable > prerelease of the same base).
describe('prerelease handling - current is stable, compared has prerelease', () => {
    it('should use normal comparison when current is stable and compared has prerelease', () => {
        const current = parse('7.2.0')!;
        const compared = parse('7.2.0-beta.1')!;
        expect(compareVersions(current, compared, gte)).toBe(true);
    });

    it('should use normal comparison for lte when current is stable and compared has prerelease', () => {
        const current = parse('7.2.0')!;
        const compared = parse('7.2.0-beta.1')!;
        expect(compareVersions(current, compared, lte)).toBe(false);
    });
});

// When both sides have prerelease tags, plain semver ordering of the
// prerelease identifiers applies.
describe('prerelease handling - both have prerelease', () => {
    it('should use normal comparison when both versions have prerelease', () => {
        const current = parse('7.2.0-beta.2')!;
        const compared = parse('7.2.0-beta.1')!;
        expect(compareVersions(current, compared, gte)).toBe(true);
    });

    it('should use normal comparison for lte when both have prerelease', () => {
        const current = parse('7.2.0-beta.1')!;
        const compared = parse('7.2.0-beta.2')!;
        expect(compareVersions(current, compared, lte)).toBe(true);
    });

    it('should use normal comparison when prerelease versions are equal', () => {
        const current = parse('7.2.0-beta.1')!;
        const compared = parse('7.2.0-beta.1')!;
        expect(compareVersions(current, compared, eq)).toBe(true);
    });
});

// Differing major/minor/patch: the prerelease special case never triggers.
describe('prerelease handling - different base versions', () => {
    it('should use normal comparison when base versions differ (current prerelease)', () => {
        const current = parse('7.3.0-beta.1')!;
        const compared = parse('7.2.0')!;
        expect(compareVersions(current, compared, gte)).toBe(true);
    });

    it('should use normal comparison when base versions differ (current prerelease, less)', () => {
        const current = parse('7.1.0-beta.1')!;
        const compared = parse('7.2.0')!;
        expect(compareVersions(current, compared, gte)).toBe(false);
    });
});

// The includePrerelease option toggles the special-case handling; defaults on.
describe('includePrerelease flag', () => {
    it('should apply special prerelease handling when includePrerelease is true', () => {
        const current = parse('7.2.0-beta.1')!;
        const compared = parse('7.2.0')!;
        expect(compareVersions(current, compared, gte, { includePrerelease: true })).toBe(true);
    });

    it('should skip special prerelease handling when includePrerelease is false', () => {
        const current = parse('7.2.0-beta.1')!;
        const compared = parse('7.2.0')!;
        expect(compareVersions(current, compared, gte, { includePrerelease: false })).toBe(false);
    });

    it('should default to includePrerelease true', () => {
        const current = parse('7.2.0-beta.1')!;
        const compared = parse('7.2.0')!;
        expect(compareVersions(current, compared, gte)).toBe(true);
    });
});

// Plain ordering checks across patch/minor/major plus assorted prerelease tags.
describe('edge cases', () => {
    it('should handle patch version differences', () => {
        const current = parse('7.2.1')!;
        const compared = parse('7.2.0')!;
        expect(compareVersions(current, compared, gte)).toBe(true);
    });

    it('should handle minor version differences', () => {
        const current = parse('7.3.0')!;
        const compared = parse('7.2.0')!;
        expect(compareVersions(current, compared, gte)).toBe(true);
    });

    it('should handle major version differences', () => {
        const current = parse('8.0.0')!;
        const compared = parse('7.2.0')!;
        expect(compareVersions(current, compared, gte)).toBe(true);
    });

    it('should handle complex prerelease tags', () => {
        const current = parse('7.2.0-beta.2.4')!;
        const compared = parse('7.2.0')!;
        expect(compareVersions(current, compared, gte)).toBe(true);
    });

    it('should handle alpha prerelease tags', () => {
        const current = parse('7.2.0-alpha.1')!;
        const compared = parse('7.2.0')!;
        expect(compareVersions(current, compared, gte)).toBe(true);
    });

    it('should handle rc prerelease tags', () => {
        const current = parse('7.2.0-rc.1')!;
        const compared = parse('7.2.0')!;
        expect(compareVersions(current, compared, gte)).toBe(true);
    });
});

// Comparators other than semver's own gte/gt/lte/lt fall through to a plain
// call of the supplied function (identity check inside compareVersions).
describe('comparison function edge cases', () => {
    it('should handle custom comparison functions that are not gte/lte/gt/lt', () => {
        const current = parse('7.2.0-beta.1')!;
        const compared = parse('7.2.0')!;
        const customCompare = (a: typeof current, b: typeof compared) => a.compare(b) === 1;
        expect(compareVersions(current, compared, customCompare)).toBe(false);
    });

    it('should fall through to normal comparison for unknown functions with prerelease', () => {
        const current = parse('7.2.0-beta.1')!;
        const compared = parse('7.2.0')!;
        const customCompare = () => false;
        expect(compareVersions(current, compared, customCompare)).toBe(false);
    });
});
|
||||
});
|
||||
44 api/src/common/compare-semver-version.ts (new file)
@@ -0,0 +1,44 @@
|
||||
import type { SemVer } from 'semver';
|
||||
import { gt, gte, lt, lte } from 'semver';
|
||||
|
||||
/**
|
||||
* Shared version comparison logic with special handling for prerelease versions.
|
||||
*
|
||||
* When base versions are equal and current version has a prerelease tag while compared doesn't:
|
||||
* - For gte/gt: prerelease is considered greater than stable (returns true)
|
||||
* - For lte/lt: prerelease is considered less than stable (returns false)
|
||||
* - For eq: prerelease is not equal to stable (returns false)
|
||||
*
|
||||
* @param currentVersion - The current Unraid version (SemVer object)
|
||||
* @param comparedVersion - The version to compare against (SemVer object)
|
||||
* @param compareFn - The comparison function (e.g., gte, lte, lt, gt, eq)
|
||||
* @param includePrerelease - Whether to include special prerelease handling
|
||||
* @returns The result of the comparison
|
||||
*/
|
||||
export const compareVersions = (
|
||||
currentVersion: SemVer,
|
||||
comparedVersion: SemVer,
|
||||
compareFn: (a: SemVer, b: SemVer) => boolean,
|
||||
{ includePrerelease = true }: { includePrerelease?: boolean } = {}
|
||||
): boolean => {
|
||||
if (includePrerelease) {
|
||||
const baseCurrent = `${currentVersion.major}.${currentVersion.minor}.${currentVersion.patch}`;
|
||||
const baseCompared = `${comparedVersion.major}.${comparedVersion.minor}.${comparedVersion.patch}`;
|
||||
|
||||
if (baseCurrent === baseCompared) {
|
||||
const currentHasPrerelease = currentVersion.prerelease.length > 0;
|
||||
const comparedHasPrerelease = comparedVersion.prerelease.length > 0;
|
||||
|
||||
if (currentHasPrerelease && !comparedHasPrerelease) {
|
||||
if (compareFn === gte || compareFn === gt) {
|
||||
return true;
|
||||
}
|
||||
if (compareFn === lte || compareFn === lt) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return compareFn(currentVersion, comparedVersion);
|
||||
};
|
||||
60 api/src/common/get-unraid-version-sync.ts (new file)
@@ -0,0 +1,60 @@
|
||||
import type { SemVer } from 'semver';
|
||||
import { coerce } from 'semver';
|
||||
|
||||
import { compareVersions } from '@app/common/compare-semver-version.js';
|
||||
import { fileExistsSync } from '@app/core/utils/files/file-exists.js';
|
||||
import { parseConfig } from '@app/core/utils/misc/parse-config.js';
|
||||
|
||||
type UnraidVersionIni = {
|
||||
version?: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Synchronously reads the Unraid version from /etc/unraid-version
|
||||
* @returns The Unraid version string, or 'unknown' if the file cannot be read
|
||||
*/
|
||||
export const getUnraidVersionSync = (): string => {
|
||||
const versionPath = '/etc/unraid-version';
|
||||
|
||||
if (!fileExistsSync(versionPath)) {
|
||||
return 'unknown';
|
||||
}
|
||||
|
||||
try {
|
||||
const versionIni = parseConfig<UnraidVersionIni>({ filePath: versionPath, type: 'ini' });
|
||||
return versionIni.version || 'unknown';
|
||||
} catch {
|
||||
return 'unknown';
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Compares the Unraid version against a specified version using a comparison function
|
||||
* @param compareFn - The comparison function from semver (e.g., lt, gte, lte, gt, eq)
|
||||
* @param version - The version to compare against (e.g., '7.3.0')
|
||||
* @param options - Options for the comparison
|
||||
* @returns The result of the comparison, or false if the version cannot be determined
|
||||
*/
|
||||
export const compareUnraidVersionSync = (
|
||||
compareFn: (a: SemVer, b: SemVer) => boolean,
|
||||
version: string,
|
||||
{ includePrerelease = true }: { includePrerelease?: boolean } = {}
|
||||
): boolean => {
|
||||
const currentVersion = getUnraidVersionSync();
|
||||
if (currentVersion === 'unknown') {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
const current = coerce(currentVersion, { includePrerelease });
|
||||
const compared = coerce(version, { includePrerelease });
|
||||
|
||||
if (!current || !compared) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return compareVersions(current, compared, compareFn, { includePrerelease });
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
12
api/src/connect-plugin-cleanup.ts
Normal file
12
api/src/connect-plugin-cleanup.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import { existsSync } from 'node:fs';
|
||||
|
||||
/**
|
||||
* Local filesystem and env checks stay synchronous so we can branch at module load.
|
||||
* @returns True if the Connect Unraid plugin is installed, false otherwise.
|
||||
*/
|
||||
export const isConnectPluginInstalled = () => {
|
||||
if (process.env.SKIP_CONNECT_PLUGIN_CHECK === 'true') {
|
||||
return true;
|
||||
}
|
||||
return existsSync('/boot/config/plugins/dynamix.unraid.net.plg');
|
||||
};
|
||||
@@ -1,7 +1,7 @@
|
||||
import pino from 'pino';
|
||||
import pretty from 'pino-pretty';
|
||||
|
||||
import { API_VERSION, LOG_LEVEL, LOG_TYPE, SUPPRESS_LOGS } from '@app/environment.js';
|
||||
import { API_VERSION, LOG_LEVEL, LOG_TYPE, PATHS_LOGS_FILE, SUPPRESS_LOGS } from '@app/environment.js';
|
||||
|
||||
export const levels = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'] as const;
|
||||
|
||||
@@ -15,18 +15,24 @@ const nullDestination = pino.destination({
|
||||
},
|
||||
});
|
||||
|
||||
const LOG_TRANSPORT = process.env.LOG_TRANSPORT ?? 'file';
|
||||
const useConsole = LOG_TRANSPORT === 'console';
|
||||
|
||||
export const logDestination =
|
||||
process.env.SUPPRESS_LOGS === 'true' ? nullDestination : pino.destination();
|
||||
// Since PM2 captures stdout and writes to the log file, we should not colorize stdout
|
||||
// to avoid ANSI escape codes in the log file
|
||||
process.env.SUPPRESS_LOGS === 'true'
|
||||
? nullDestination
|
||||
: useConsole
|
||||
? pino.destination(1) // stdout
|
||||
: pino.destination({ dest: PATHS_LOGS_FILE, mkdir: true });
|
||||
|
||||
const stream = SUPPRESS_LOGS
|
||||
? nullDestination
|
||||
: LOG_TYPE === 'pretty'
|
||||
? pretty({
|
||||
singleLine: true,
|
||||
hideObject: false,
|
||||
colorize: false, // No colors since PM2 writes stdout to file
|
||||
colorizeObjects: false,
|
||||
colorize: useConsole, // Enable colors when outputting to console
|
||||
colorizeObjects: useConsole,
|
||||
levelFirst: false,
|
||||
ignore: 'hostname,pid',
|
||||
destination: logDestination,
|
||||
@@ -34,10 +40,10 @@ const stream = SUPPRESS_LOGS
|
||||
customPrettifiers: {
|
||||
time: (timestamp: string | object) => `[${timestamp}`,
|
||||
level: (_logLevel: string | object, _key: string, log: any, extras: any) => {
|
||||
// Use label instead of labelColorized for non-colored output
|
||||
const { label } = extras;
|
||||
const { label, labelColorized } = extras;
|
||||
const context = log.context || log.logger || 'app';
|
||||
return `${label} ${context}]`;
|
||||
// Use colorized label when outputting to console
|
||||
return `${useConsole ? labelColorized : label} ${context}]`;
|
||||
},
|
||||
},
|
||||
messageFormat: (log: any, messageKey: string) => {
|
||||
|
||||
@@ -93,6 +93,9 @@ interface Notify {
|
||||
system: string;
|
||||
version: string;
|
||||
docker_update: string;
|
||||
expand?: string | boolean;
|
||||
duration?: string | number;
|
||||
max?: string | number;
|
||||
}
|
||||
|
||||
interface Ssmtp {
|
||||
|
||||
66
api/src/core/utils/__test__/safe-mode.test.ts
Normal file
66
api/src/core/utils/__test__/safe-mode.test.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { isSafeModeEnabled } from '@app/core/utils/safe-mode.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import * as stateFileLoader from '@app/store/services/state-file-loader.js';
|
||||
|
||||
// Unit tests for isSafeModeEnabled: the store is consulted first, the
// synchronous state-file loader is the fallback, and the default is false
// when neither source can answer.
describe('isSafeModeEnabled', () => {
    afterEach(() => {
        vi.restoreAllMocks();
    });

    it('returns the safe mode flag already present in the store', () => {
        const baseState = store.getState();
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            emhttp: {
                ...baseState.emhttp,
                var: {
                    ...(baseState.emhttp?.var ?? {}),
                    safeMode: true,
                },
            },
        });
        const loaderSpy = vi.spyOn(stateFileLoader, 'loadStateFileSync');

        expect(isSafeModeEnabled()).toBe(true);
        // The loader must not be touched when the store already has the flag.
        expect(loaderSpy).not.toHaveBeenCalled();
    });

    it('falls back to the synchronous loader when store state is missing', () => {
        const baseState = store.getState();
        // Force safeMode to be absent in the store so the fallback path runs.
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            emhttp: {
                ...baseState.emhttp,
                var: {
                    ...(baseState.emhttp?.var ?? {}),
                    safeMode: undefined as unknown as boolean,
                } as typeof baseState.emhttp.var,
            } as typeof baseState.emhttp,
        } as typeof baseState);
        vi.spyOn(stateFileLoader, 'loadStateFileSync').mockReturnValue({
            ...(baseState.emhttp?.var ?? {}),
            safeMode: true,
        } as any);

        expect(isSafeModeEnabled()).toBe(true);
    });

    it('defaults to false when loader cannot provide state', () => {
        const baseState = store.getState();
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            emhttp: {
                ...baseState.emhttp,
                var: {
                    ...(baseState.emhttp?.var ?? {}),
                    safeMode: undefined as unknown as boolean,
                } as typeof baseState.emhttp.var,
            } as typeof baseState.emhttp,
        } as typeof baseState);
        // Loader returning null means no state file could be read.
        vi.spyOn(stateFileLoader, 'loadStateFileSync').mockReturnValue(null);

        expect(isSafeModeEnabled()).toBe(false);
    });
});
|
||||
231
api/src/core/utils/misc/__test__/timeout-budget.test.ts
Normal file
231
api/src/core/utils/misc/__test__/timeout-budget.test.ts
Normal file
@@ -0,0 +1,231 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { TimeoutBudget } from '@app/core/utils/misc/timeout-budget.js';
|
||||
|
||||
// Unit tests for TimeoutBudget. Fake timers make Date.now() deterministic so
// remaining()/elapsed() can be asserted exactly.
describe('TimeoutBudget', () => {
    beforeEach(() => {
        vi.useFakeTimers();
    });

    afterEach(() => {
        vi.useRealTimers();
    });

    describe('constructor', () => {
        it('initializes with the given budget', () => {
            const budget = new TimeoutBudget(10000);
            expect(budget.remaining()).toBe(10000);
            expect(budget.elapsed()).toBe(0);
        });
    });

    describe('remaining', () => {
        it('returns full budget immediately after construction', () => {
            const budget = new TimeoutBudget(5000);
            expect(budget.remaining()).toBe(5000);
        });

        it('decreases as time passes', () => {
            const budget = new TimeoutBudget(5000);

            vi.advanceTimersByTime(1000);
            expect(budget.remaining()).toBe(4000);

            vi.advanceTimersByTime(2000);
            expect(budget.remaining()).toBe(2000);
        });

        it('never returns negative values', () => {
            const budget = new TimeoutBudget(1000);

            vi.advanceTimersByTime(5000); // Well past the budget
            expect(budget.remaining()).toBe(0);
        });

        it('returns zero when budget is exactly exhausted', () => {
            const budget = new TimeoutBudget(1000);

            vi.advanceTimersByTime(1000);
            expect(budget.remaining()).toBe(0);
        });
    });

    describe('elapsed', () => {
        it('returns zero immediately after construction', () => {
            const budget = new TimeoutBudget(5000);
            expect(budget.elapsed()).toBe(0);
        });

        it('increases as time passes', () => {
            const budget = new TimeoutBudget(5000);

            vi.advanceTimersByTime(1000);
            expect(budget.elapsed()).toBe(1000);

            vi.advanceTimersByTime(500);
            expect(budget.elapsed()).toBe(1500);
        });

        it('continues increasing past the budget limit', () => {
            const budget = new TimeoutBudget(1000);

            vi.advanceTimersByTime(2000);
            expect(budget.elapsed()).toBe(2000);
        });
    });

    describe('getTimeout', () => {
        it('returns maxMs when plenty of budget remains', () => {
            const budget = new TimeoutBudget(10000);
            expect(budget.getTimeout(2000)).toBe(2000);
        });

        it('returns maxMs when budget minus reserve is sufficient', () => {
            const budget = new TimeoutBudget(10000);
            expect(budget.getTimeout(2000, 5000)).toBe(2000);
        });

        it('caps timeout to available budget minus reserve', () => {
            const budget = new TimeoutBudget(10000);
            vi.advanceTimersByTime(5000); // 5000ms remaining

            // Want 2000ms but reserve 4000ms, only 1000ms available
            expect(budget.getTimeout(2000, 4000)).toBe(1000);
        });

        it('caps timeout to remaining budget when no reserve', () => {
            const budget = new TimeoutBudget(1000);
            vi.advanceTimersByTime(800); // 200ms remaining

            expect(budget.getTimeout(500)).toBe(200);
        });

        it('returns minimum of 100ms even when budget is exhausted', () => {
            const budget = new TimeoutBudget(1000);
            vi.advanceTimersByTime(2000); // Budget exhausted

            expect(budget.getTimeout(500)).toBe(100);
        });

        it('returns minimum of 100ms when reserve exceeds remaining', () => {
            const budget = new TimeoutBudget(5000);
            vi.advanceTimersByTime(4000); // 1000ms remaining

            // Reserve 2000ms but only 1000ms remaining
            expect(budget.getTimeout(500, 2000)).toBe(100);
        });

        it('uses default reserve of 0 when not specified', () => {
            const budget = new TimeoutBudget(1000);
            vi.advanceTimersByTime(500); // 500ms remaining

            expect(budget.getTimeout(1000)).toBe(500); // Capped to remaining
        });
    });

    describe('hasTimeFor', () => {
        it('returns true when enough time remains', () => {
            const budget = new TimeoutBudget(5000);
            expect(budget.hasTimeFor(3000)).toBe(true);
        });

        it('returns true when exactly enough time remains', () => {
            const budget = new TimeoutBudget(5000);
            expect(budget.hasTimeFor(5000)).toBe(true);
        });

        it('returns false when not enough time remains', () => {
            const budget = new TimeoutBudget(5000);
            expect(budget.hasTimeFor(6000)).toBe(false);
        });

        it('accounts for elapsed time', () => {
            const budget = new TimeoutBudget(5000);
            vi.advanceTimersByTime(3000); // 2000ms remaining

            expect(budget.hasTimeFor(2000)).toBe(true);
            expect(budget.hasTimeFor(3000)).toBe(false);
        });

        it('returns false when budget is exhausted', () => {
            const budget = new TimeoutBudget(1000);
            vi.advanceTimersByTime(2000);

            expect(budget.hasTimeFor(1)).toBe(false);
        });

        it('returns true for zero required time', () => {
            const budget = new TimeoutBudget(1000);
            vi.advanceTimersByTime(2000); // Budget exhausted

            expect(budget.hasTimeFor(0)).toBe(true);
        });
    });

    // End-to-end walkthroughs mirroring how the startup sequence consumes
    // the budget: several capped operations, then bootstrap takes the rest.
    describe('integration scenarios', () => {
        it('simulates a typical startup sequence', () => {
            const budget = new TimeoutBudget(13000); // 13 second budget
            const BOOTSTRAP_RESERVE = 8000;
            const MAX_OP_TIMEOUT = 2000;

            // First operation - should get full 2000ms
            const op1Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            expect(op1Timeout).toBe(2000);

            // Simulate operation taking 500ms
            vi.advanceTimersByTime(500);

            // Second operation - still have plenty of budget
            const op2Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            expect(op2Timeout).toBe(2000);

            // Simulate operation taking 1000ms
            vi.advanceTimersByTime(1000);

            // Third operation
            const op3Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            expect(op3Timeout).toBe(2000);

            // Simulate slow operation taking 2000ms
            vi.advanceTimersByTime(2000);

            // Now 3500ms elapsed, 9500ms remaining
            // After reserve, only 1500ms available - less than max
            const op4Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            expect(op4Timeout).toBe(1500);

            // Simulate operation completing
            vi.advanceTimersByTime(1000);

            // Bootstrap phase - use all remaining time
            const bootstrapTimeout = budget.remaining();
            expect(bootstrapTimeout).toBe(8500);
            expect(budget.hasTimeFor(8000)).toBe(true);
        });

        it('handles worst-case scenario where all operations timeout', () => {
            const budget = new TimeoutBudget(13000);
            const BOOTSTRAP_RESERVE = 8000;
            const MAX_OP_TIMEOUT = 2000;

            // Each operation times out at its limit
            // Available for operations: 13000 - 8000 = 5000ms

            // Op 1: gets 2000ms, times out
            budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            vi.advanceTimersByTime(2000);

            // Op 2: gets 2000ms, times out
            budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            vi.advanceTimersByTime(2000);

            // Op 3: only 1000ms available (5000 - 4000), times out
            const op3Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            expect(op3Timeout).toBe(1000);
            vi.advanceTimersByTime(1000);

            // Bootstrap: should still have 8000ms
            expect(budget.remaining()).toBe(8000);
        });
    });
});
|
||||
65
api/src/core/utils/misc/__test__/with-timeout.test.ts
Normal file
65
api/src/core/utils/misc/__test__/with-timeout.test.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { describe, expect, it } from 'vitest';
|
||||
|
||||
import { withTimeout } from '@app/core/utils/misc/with-timeout.js';
|
||||
|
||||
// Unit tests for withTimeout: fast resolution, timeout rejection with the
// operation name in the message, and pass-through of original rejections.
// Real timers are used, so delays are kept small.
describe('withTimeout', () => {
    it('resolves when promise completes before timeout', async () => {
        const promise = Promise.resolve('success');
        const result = await withTimeout(promise, 1000, 'testOp');
        expect(result).toBe('success');
    });

    it('resolves with correct value for delayed promise within timeout', async () => {
        const promise = new Promise<number>((resolve) => setTimeout(() => resolve(42), 50));
        const result = await withTimeout(promise, 1000, 'testOp');
        expect(result).toBe(42);
    });

    it('rejects when promise takes longer than timeout', async () => {
        const promise = new Promise<string>((resolve) => setTimeout(() => resolve('late'), 500));
        await expect(withTimeout(promise, 50, 'slowOp')).rejects.toThrow('slowOp timed out after 50ms');
    });

    it('includes operation name in timeout error message', async () => {
        const promise = new Promise<void>(() => {}); // Never resolves
        await expect(withTimeout(promise, 10, 'myCustomOperation')).rejects.toThrow(
            'myCustomOperation timed out after 10ms'
        );
    });

    it('propagates rejection from the original promise', async () => {
        const promise = Promise.reject(new Error('original error'));
        await expect(withTimeout(promise, 1000, 'testOp')).rejects.toThrow('original error');
    });

    it('resolves immediately for already-resolved promises', async () => {
        const promise = Promise.resolve('immediate');
        const start = Date.now();
        const result = await withTimeout(promise, 1000, 'testOp');
        const elapsed = Date.now() - start;

        expect(result).toBe('immediate');
        expect(elapsed).toBeLessThan(50); // Should be nearly instant
    });

    it('works with zero timeout (immediately times out for pending promises)', async () => {
        const promise = new Promise<void>(() => {}); // Never resolves
        await expect(withTimeout(promise, 0, 'zeroTimeout')).rejects.toThrow(
            'zeroTimeout timed out after 0ms'
        );
    });

    it('preserves the type of the resolved value', async () => {
        interface TestType {
            id: number;
            name: string;
        }
        const testObj: TestType = { id: 1, name: 'test' };
        const promise = Promise.resolve(testObj);

        const result = await withTimeout(promise, 1000, 'testOp');

        expect(result.id).toBe(1);
        expect(result.name).toBe('test');
    });
});
|
||||
@@ -2,7 +2,7 @@ import { AppError } from '@app/core/errors/app-error.js';
|
||||
import { getters } from '@app/store/index.js';
|
||||
|
||||
interface DockerError extends NodeJS.ErrnoException {
|
||||
address: string;
|
||||
address?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
70
api/src/core/utils/misc/timeout-budget.ts
Normal file
70
api/src/core/utils/misc/timeout-budget.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
/**
|
||||
* Tracks remaining time budget to ensure we don't exceed external timeouts (e.g., PM2's listen_timeout).
|
||||
*
|
||||
* This class helps coordinate multiple async operations by:
|
||||
* - Tracking elapsed time from construction
|
||||
* - Calculating dynamic timeouts based on remaining budget
|
||||
* - Reserving time for critical operations (like server bootstrap)
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const budget = new TimeoutBudget(15000); // 15 second total budget
|
||||
*
|
||||
* // Each operation gets a timeout capped by remaining budget
|
||||
* await withTimeout(loadConfig(), budget.getTimeout(2000, 8000), 'loadConfig');
|
||||
* await withTimeout(loadState(), budget.getTimeout(2000, 8000), 'loadState');
|
||||
*
|
||||
* // Bootstrap gets all remaining time
|
||||
* await withTimeout(bootstrap(), budget.remaining(), 'bootstrap');
|
||||
*
|
||||
* console.log(`Completed in ${budget.elapsed()}ms`);
|
||||
* ```
|
||||
*/
|
||||
export class TimeoutBudget {
|
||||
private startTime: number;
|
||||
private budgetMs: number;
|
||||
|
||||
/**
|
||||
* Creates a new startup budget tracker.
|
||||
* @param budgetMs Total time budget in milliseconds
|
||||
*/
|
||||
constructor(budgetMs: number) {
|
||||
this.startTime = Date.now();
|
||||
this.budgetMs = budgetMs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns remaining time in milliseconds.
|
||||
* Never returns negative values.
|
||||
*/
|
||||
remaining(): number {
|
||||
return Math.max(0, this.budgetMs - (Date.now() - this.startTime));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns elapsed time in milliseconds since construction.
|
||||
*/
|
||||
elapsed(): number {
|
||||
return Date.now() - this.startTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns timeout for an operation, capped by remaining budget.
|
||||
*
|
||||
* @param maxMs Maximum timeout for this operation
|
||||
* @param reserveMs Time to reserve for future operations (e.g., server bootstrap)
|
||||
* @returns Timeout in milliseconds (minimum 100ms to avoid instant failures)
|
||||
*/
|
||||
getTimeout(maxMs: number, reserveMs: number = 0): number {
|
||||
const available = this.remaining() - reserveMs;
|
||||
return Math.max(100, Math.min(maxMs, available));
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if there's enough time remaining for an operation.
|
||||
* @param requiredMs Time required in milliseconds
|
||||
*/
|
||||
hasTimeFor(requiredMs: number): boolean {
|
||||
return this.remaining() >= requiredMs;
|
||||
}
|
||||
}
|
||||
25
api/src/core/utils/misc/with-timeout.ts
Normal file
25
api/src/core/utils/misc/with-timeout.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
/**
|
||||
* Wraps a promise with a timeout to prevent hangs.
|
||||
* If the operation takes longer than timeoutMs, it rejects with a timeout error.
|
||||
*
|
||||
* @param promise The promise to wrap with a timeout
|
||||
* @param timeoutMs Maximum time in milliseconds before timing out
|
||||
* @param operationName Name of the operation for the error message
|
||||
* @returns The result of the promise if it completes in time
|
||||
* @throws Error if the operation times out
|
||||
*/
|
||||
export const withTimeout = <T>(
|
||||
promise: Promise<T>,
|
||||
timeoutMs: number,
|
||||
operationName: string
|
||||
): Promise<T> => {
|
||||
return Promise.race([
|
||||
promise,
|
||||
new Promise<never>((_, reject) =>
|
||||
setTimeout(
|
||||
() => reject(new Error(`${operationName} timed out after ${timeoutMs}ms`)),
|
||||
timeoutMs
|
||||
)
|
||||
),
|
||||
]);
|
||||
};
|
||||
19
api/src/core/utils/network.ts
Normal file
19
api/src/core/utils/network.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import { getters } from '@app/store/index.js';
|
||||
|
||||
/**
|
||||
* Returns the LAN IPv4 address reported by emhttp, if available.
|
||||
*/
|
||||
export function getLanIp(): string {
|
||||
const emhttp = getters.emhttp();
|
||||
const lanFromNetworks = emhttp?.networks?.[0]?.ipaddr?.[0];
|
||||
if (lanFromNetworks) {
|
||||
return lanFromNetworks;
|
||||
}
|
||||
|
||||
const lanFromNginx = emhttp?.nginx?.lanIp;
|
||||
if (lanFromNginx) {
|
||||
return lanFromNginx;
|
||||
}
|
||||
|
||||
return '';
|
||||
}
|
||||
86
api/src/core/utils/parsers/ini-boolean-parser.ts
Normal file
86
api/src/core/utils/parsers/ini-boolean-parser.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
import { type IniStringBoolean, type IniStringBooleanOrAuto } from '@app/core/types/ini.js';
|
||||
|
||||
/**
|
||||
* Converts INI boolean string values to JavaScript boolean values.
|
||||
* Handles malformed values by cleaning them of non-alphabetic characters.
|
||||
*
|
||||
* @param value - The string value to parse ("yes", "no", "true", "false", etc.)
|
||||
* @returns boolean value or undefined if parsing fails
|
||||
*/
|
||||
export function iniBooleanToJsBoolean(value: string): boolean | undefined;
|
||||
/**
|
||||
* Converts INI boolean string values to JavaScript boolean values.
|
||||
* Handles malformed values by cleaning them of non-alphabetic characters.
|
||||
*
|
||||
* @param value - The string value to parse ("yes", "no", "true", "false", etc.)
|
||||
* @param defaultValue - Default value to return if parsing fails
|
||||
* @returns boolean value or defaultValue if parsing fails (never undefined when defaultValue is provided)
|
||||
*/
|
||||
export function iniBooleanToJsBoolean(value: string, defaultValue: boolean): boolean;
|
||||
export function iniBooleanToJsBoolean(value: string, defaultValue?: boolean): boolean | undefined {
|
||||
if (value === 'no' || value === 'false') {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (value === 'yes' || value === 'true') {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Handle malformed values by cleaning them first
|
||||
if (typeof value === 'string') {
|
||||
const cleanValue = value.replace(/[^a-zA-Z]/g, '').toLowerCase();
|
||||
if (cleanValue === 'no' || cleanValue === 'false') {
|
||||
return false;
|
||||
}
|
||||
if (cleanValue === 'yes' || cleanValue === 'true') {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// Always return defaultValue when provided (even if undefined)
|
||||
if (arguments.length >= 2) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
// Return undefined only when no default was provided
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts INI boolean or auto string values to JavaScript boolean or null values.
|
||||
* Handles malformed values by cleaning them of non-alphabetic characters.
|
||||
*
|
||||
* @param value - The string value to parse ("yes", "no", "auto", "true", "false", etc.)
|
||||
* @returns boolean value for yes/no/true/false, null for auto, or undefined as fallback
|
||||
*/
|
||||
export const iniBooleanOrAutoToJsBoolean = (
|
||||
value: IniStringBooleanOrAuto | string
|
||||
): boolean | null | undefined => {
|
||||
// Handle auto first
|
||||
if (value === 'auto') {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Try to parse as boolean
|
||||
const boolResult = iniBooleanToJsBoolean(value as IniStringBoolean);
|
||||
if (boolResult !== undefined) {
|
||||
return boolResult;
|
||||
}
|
||||
|
||||
// Handle malformed values like "auto*" by extracting the base value
|
||||
if (typeof value === 'string') {
|
||||
const cleanValue = value.replace(/[^a-zA-Z]/g, '').toLowerCase();
|
||||
if (cleanValue === 'auto') {
|
||||
return null;
|
||||
}
|
||||
if (cleanValue === 'no' || cleanValue === 'false') {
|
||||
return false;
|
||||
}
|
||||
if (cleanValue === 'yes' || cleanValue === 'true') {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// Return undefined as fallback instead of throwing to prevent API crash
|
||||
return undefined;
|
||||
};
|
||||
17
api/src/core/utils/safe-mode.ts
Normal file
17
api/src/core/utils/safe-mode.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { store } from '@app/store/index.js';
|
||||
import { loadStateFileSync } from '@app/store/services/state-file-loader.js';
|
||||
import { StateFileKey } from '@app/store/types.js';
|
||||
|
||||
export const isSafeModeEnabled = (): boolean => {
|
||||
const safeModeFromStore = store.getState().emhttp?.var?.safeMode;
|
||||
if (typeof safeModeFromStore === 'boolean') {
|
||||
return safeModeFromStore;
|
||||
}
|
||||
|
||||
const varState = loadStateFileSync(StateFileKey.var);
|
||||
if (varState) {
|
||||
return Boolean(varState.safeMode);
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
@@ -111,5 +111,10 @@ export const PATHS_CONFIG_MODULES =
|
||||
export const PATHS_LOCAL_SESSION_FILE =
|
||||
process.env.PATHS_LOCAL_SESSION_FILE ?? '/var/run/unraid-api/local-session';
|
||||
|
||||
export const PATHS_DOCKER_TEMPLATES = process.env.PATHS_DOCKER_TEMPLATES?.split(',') ?? [
|
||||
'/boot/config/plugins/dockerMan/templates-user',
|
||||
'/boot/config/plugins/dockerMan/templates',
|
||||
];
|
||||
|
||||
/** feature flag for the upcoming docker release */
|
||||
export const ENABLE_NEXT_DOCKER_RELEASE = process.env.ENABLE_NEXT_DOCKER_RELEASE === 'true';
|
||||
|
||||
1
api/src/i18n/ar.json
Normal file
1
api/src/i18n/ar.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/bn.json
Normal file
1
api/src/i18n/bn.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/ca.json
Normal file
1
api/src/i18n/ca.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/cs.json
Normal file
1
api/src/i18n/cs.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/da.json
Normal file
1
api/src/i18n/da.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/de.json
Normal file
1
api/src/i18n/de.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/en.json
Normal file
1
api/src/i18n/en.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/es.json
Normal file
1
api/src/i18n/es.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/fr.json
Normal file
1
api/src/i18n/fr.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/hi.json
Normal file
1
api/src/i18n/hi.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/hr.json
Normal file
1
api/src/i18n/hr.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/hu.json
Normal file
1
api/src/i18n/hu.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/it.json
Normal file
1
api/src/i18n/it.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/ja.json
Normal file
1
api/src/i18n/ja.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/ko.json
Normal file
1
api/src/i18n/ko.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/lv.json
Normal file
1
api/src/i18n/lv.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/nl.json
Normal file
1
api/src/i18n/nl.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/no.json
Normal file
1
api/src/i18n/no.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/pl.json
Normal file
1
api/src/i18n/pl.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/pt.json
Normal file
1
api/src/i18n/pt.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/ro.json
Normal file
1
api/src/i18n/ro.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/ru.json
Normal file
1
api/src/i18n/ru.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/sv.json
Normal file
1
api/src/i18n/sv.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/uk.json
Normal file
1
api/src/i18n/uk.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
1
api/src/i18n/zh.json
Normal file
1
api/src/i18n/zh.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
113
api/src/index.ts
113
api/src/index.ts
@@ -15,28 +15,38 @@ import { WebSocket } from 'ws';
|
||||
|
||||
import { logger } from '@app/core/log.js';
|
||||
import { fileExistsSync } from '@app/core/utils/files/file-exists.js';
|
||||
import { TimeoutBudget } from '@app/core/utils/misc/timeout-budget.js';
|
||||
import { withTimeout } from '@app/core/utils/misc/with-timeout.js';
|
||||
import { getServerIdentifier } from '@app/core/utils/server-identifier.js';
|
||||
import { environment, PATHS_CONFIG_MODULES, PORT } from '@app/environment.js';
|
||||
import * as envVars from '@app/environment.js';
|
||||
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
|
||||
import { shutdownApiEvent } from '@app/store/actions/shutdown-api-event.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { loadDynamixConfig, store } from '@app/store/index.js';
|
||||
import { startMiddlewareListeners } from '@app/store/listeners/listener-middleware.js';
|
||||
import { loadStateFiles } from '@app/store/modules/emhttp.js';
|
||||
import { loadRegistrationKey } from '@app/store/modules/registration.js';
|
||||
import { setupDynamixConfigWatch } from '@app/store/watch/dynamix-config-watch.js';
|
||||
import { setupRegistrationKeyWatch } from '@app/store/watch/registration-watch.js';
|
||||
import { StateManager } from '@app/store/watch/state-watch.js';
|
||||
|
||||
let server: NestFastifyApplication<RawServerDefault> | null = null;
|
||||
|
||||
// PM2 listen_timeout is 15 seconds (ecosystem.config.json)
|
||||
// We use 13 seconds as our total budget to ensure our timeout triggers before PM2 kills us
|
||||
const TOTAL_STARTUP_BUDGET_MS = 30_000;
|
||||
// Reserve time for the NestJS bootstrap (the most critical and time-consuming operation)
|
||||
const BOOTSTRAP_RESERVED_MS = 20_000;
|
||||
// Maximum time for any single pre-bootstrap operation
|
||||
const MAX_OPERATION_TIMEOUT_MS = 5_000;
|
||||
|
||||
const unlinkUnixPort = () => {
|
||||
if (isNaN(parseInt(PORT, 10))) {
|
||||
if (fileExistsSync(PORT)) unlinkSync(PORT);
|
||||
}
|
||||
};
|
||||
|
||||
export const viteNodeApp = async () => {
|
||||
export const viteNodeApp = async (): Promise<NestFastifyApplication<RawServerDefault>> => {
|
||||
const budget = new TimeoutBudget(TOTAL_STARTUP_BUDGET_MS);
|
||||
|
||||
try {
|
||||
await import('json-bigint-patch');
|
||||
environment.IS_MAIN_PROCESS = true;
|
||||
@@ -44,15 +54,15 @@ export const viteNodeApp = async () => {
|
||||
/**------------------------------------------------------------------------
|
||||
* Attaching getServerIdentifier to globalThis
|
||||
|
||||
* getServerIdentifier is tightly coupled to the deprecated redux store,
|
||||
* getServerIdentifier is tightly coupled to the deprecated redux store,
|
||||
* which we don't want to share with other packages or plugins.
|
||||
*
|
||||
*
|
||||
* At the same time, we need to use it in @unraid/shared as a building block,
|
||||
* where it's used & available outside of NestJS's DI context.
|
||||
*
|
||||
* Attaching to globalThis is a temporary solution to avoid refactoring
|
||||
*
|
||||
* Attaching to globalThis is a temporary solution to avoid refactoring
|
||||
* config sync & management outside of NestJS's DI context.
|
||||
*
|
||||
*
|
||||
* Plugin authors should import getServerIdentifier from @unraid/shared instead,
|
||||
* to avoid breaking changes to their code.
|
||||
*------------------------------------------------------------------------**/
|
||||
@@ -60,7 +70,18 @@ export const viteNodeApp = async () => {
|
||||
logger.info('ENV %o', envVars);
|
||||
logger.info('PATHS %o', store.getState().paths);
|
||||
|
||||
await mkdir(PATHS_CONFIG_MODULES, { recursive: true });
|
||||
// Note: we use logger.info for checkpoints instead of a lower log level
|
||||
// to ensure emission during an unraid server's boot,
|
||||
// where the log level will be set to INFO by default.
|
||||
|
||||
// Create config directory
|
||||
try {
|
||||
await mkdir(PATHS_CONFIG_MODULES, { recursive: true });
|
||||
logger.info('Config directory ready');
|
||||
} catch (error) {
|
||||
logger.error(error, 'Failed to create config directory');
|
||||
throw error;
|
||||
}
|
||||
|
||||
const cacheable = new CacheableLookup();
|
||||
|
||||
@@ -70,32 +91,73 @@ export const viteNodeApp = async () => {
|
||||
cacheable.install(https.globalAgent);
|
||||
|
||||
// Load emhttp state into store
|
||||
await store.dispatch(loadStateFiles());
|
||||
try {
|
||||
const timeout = budget.getTimeout(MAX_OPERATION_TIMEOUT_MS, BOOTSTRAP_RESERVED_MS);
|
||||
await withTimeout(store.dispatch(loadStateFiles()), timeout, 'loadStateFiles');
|
||||
logger.info('Emhttp state loaded');
|
||||
} catch (error) {
|
||||
logger.error(error, 'Failed to load emhttp state files');
|
||||
logger.warn('Continuing with default state');
|
||||
}
|
||||
|
||||
// Load initial registration key into store
|
||||
await store.dispatch(loadRegistrationKey());
|
||||
try {
|
||||
const timeout = budget.getTimeout(MAX_OPERATION_TIMEOUT_MS, BOOTSTRAP_RESERVED_MS);
|
||||
await withTimeout(store.dispatch(loadRegistrationKey()), timeout, 'loadRegistrationKey');
|
||||
logger.info('Registration key loaded');
|
||||
} catch (error) {
|
||||
logger.error(error, 'Failed to load registration key');
|
||||
logger.warn('Continuing without registration key');
|
||||
}
|
||||
|
||||
// Load my dynamix config file into store
|
||||
await store.dispatch(loadDynamixConfigFile());
|
||||
try {
|
||||
loadDynamixConfig();
|
||||
logger.info('Dynamix config loaded');
|
||||
} catch (error) {
|
||||
logger.error(error, 'Failed to load dynamix config');
|
||||
logger.warn('Continuing with default dynamix config');
|
||||
}
|
||||
|
||||
// Start listening to file updates
|
||||
StateManager.getInstance();
|
||||
try {
|
||||
StateManager.getInstance();
|
||||
logger.info('State manager initialized');
|
||||
} catch (error) {
|
||||
logger.error(error, 'Failed to initialize state manager');
|
||||
logger.warn('Continuing without state watching');
|
||||
}
|
||||
|
||||
// Start listening to key file changes
|
||||
setupRegistrationKeyWatch();
|
||||
|
||||
// Start listening to dynamix config file changes
|
||||
setupDynamixConfigWatch();
|
||||
try {
|
||||
setupRegistrationKeyWatch();
|
||||
logger.info('Registration key watch active');
|
||||
} catch (error) {
|
||||
logger.error(error, 'Failed to setup registration key watch');
|
||||
logger.warn('Continuing without key file watching');
|
||||
}
|
||||
|
||||
// If port is unix socket, delete old socket before starting http server
|
||||
unlinkUnixPort();
|
||||
|
||||
startMiddlewareListeners();
|
||||
|
||||
// Start webserver
|
||||
const { bootstrapNestServer } = await import('@app/unraid-api/main.js');
|
||||
|
||||
server = await bootstrapNestServer();
|
||||
// Start webserver - use all remaining budget
|
||||
try {
|
||||
const bootstrapTimeout = budget.remaining();
|
||||
if (bootstrapTimeout < 1000) {
|
||||
logger.warn(
|
||||
`Insufficient startup budget remaining (${bootstrapTimeout}ms) for NestJS bootstrap`
|
||||
);
|
||||
}
|
||||
logger.info('Bootstrapping NestJS server (budget: %dms)...', bootstrapTimeout);
|
||||
const { bootstrapNestServer } = await import('@app/unraid-api/main.js');
|
||||
server = await withTimeout(bootstrapNestServer(), bootstrapTimeout, 'bootstrapNestServer');
|
||||
logger.info('Startup complete in %dms', budget.elapsed());
|
||||
} catch (error) {
|
||||
logger.error(error, 'Failed to start NestJS server');
|
||||
throw error; // This is critical - must rethrow to trigger graceful exit
|
||||
}
|
||||
|
||||
asyncExitHook(
|
||||
async (signal) => {
|
||||
@@ -108,8 +170,10 @@ export const viteNodeApp = async () => {
|
||||
|
||||
gracefulExit();
|
||||
},
|
||||
{ wait: 9999 }
|
||||
{ wait: 10_000 }
|
||||
);
|
||||
|
||||
return server;
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
logger.error(error, 'API-ERROR');
|
||||
@@ -120,8 +184,9 @@ export const viteNodeApp = async () => {
|
||||
await server?.close?.();
|
||||
}
|
||||
shutdownApiEvent();
|
||||
// Kill application
|
||||
// Kill application - gracefulExit calls process.exit but TS doesn't know it never returns
|
||||
gracefulExit(1);
|
||||
throw new Error('Unreachable');
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -1,12 +1,9 @@
|
||||
import { F_OK } from 'constants';
|
||||
import { access } from 'fs/promises';
|
||||
|
||||
import { createAsyncThunk } from '@reduxjs/toolkit';
|
||||
import { createTtlMemoizedLoader } from '@unraid/shared';
|
||||
|
||||
import type { RecursivePartial } from '@app/types/index.js';
|
||||
import { type DynamixConfig } from '@app/core/types/ini.js';
|
||||
import { fileExistsSync } from '@app/core/utils/files/file-exists.js';
|
||||
import { parseConfig } from '@app/core/utils/misc/parse-config.js';
|
||||
import { type RecursiveNullable, type RecursivePartial } from '@app/types/index.js';
|
||||
import { batchProcess } from '@app/utils.js';
|
||||
|
||||
/**
|
||||
* Loads a configuration file from disk, parses it to a RecursivePartial of the provided type, and returns it.
|
||||
@@ -16,11 +13,8 @@ import { batchProcess } from '@app/utils.js';
|
||||
* @param path The path to the configuration file on disk.
|
||||
* @returns A parsed RecursivePartial of the provided type.
|
||||
*/
|
||||
async function loadConfigFile<ConfigType>(path: string): Promise<RecursivePartial<ConfigType>> {
|
||||
const fileIsAccessible = await access(path, F_OK)
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
return fileIsAccessible
|
||||
function loadConfigFileSync<ConfigType>(path: string): RecursivePartial<ConfigType> {
|
||||
return fileExistsSync(path)
|
||||
? parseConfig<RecursivePartial<ConfigType>>({
|
||||
filePath: path,
|
||||
type: 'ini',
|
||||
@@ -28,21 +22,40 @@ async function loadConfigFile<ConfigType>(path: string): Promise<RecursivePartia
|
||||
: {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Load the dynamix.cfg into the store.
|
||||
*
|
||||
* Note: If the file doesn't exist this will fallback to default values.
|
||||
*/
|
||||
export const loadDynamixConfigFile = createAsyncThunk<
|
||||
RecursiveNullable<RecursivePartial<DynamixConfig>>,
|
||||
string | undefined
|
||||
>('config/load-dynamix-config-file', async (filePath) => {
|
||||
if (filePath) {
|
||||
return loadConfigFile<DynamixConfig>(filePath);
|
||||
}
|
||||
const store = await import('@app/store/index.js');
|
||||
const paths = store.getters.paths()['dynamix-config'];
|
||||
const { data: configs } = await batchProcess(paths, (path) => loadConfigFile<DynamixConfig>(path));
|
||||
const [defaultConfig = {}, customConfig = {}] = configs;
|
||||
return { ...defaultConfig, ...customConfig };
|
||||
type ConfigPaths = readonly (string | undefined | null)[];
|
||||
const CACHE_WINDOW_MS = 250;
|
||||
|
||||
const memoizedConfigLoader = createTtlMemoizedLoader<
|
||||
RecursivePartial<DynamixConfig>,
|
||||
ConfigPaths,
|
||||
string
|
||||
>({
|
||||
ttlMs: CACHE_WINDOW_MS,
|
||||
getCacheKey: (configPaths: ConfigPaths): string => JSON.stringify(configPaths),
|
||||
load: (configPaths: ConfigPaths) => {
|
||||
const validPaths = configPaths.filter((path): path is string => Boolean(path));
|
||||
if (validPaths.length === 0) {
|
||||
return {};
|
||||
}
|
||||
const configFiles = validPaths.map((path) => loadConfigFileSync<DynamixConfig>(path));
|
||||
return configFiles.reduce<RecursivePartial<DynamixConfig>>(
|
||||
(accumulator, configFile) => ({
|
||||
...accumulator,
|
||||
...configFile,
|
||||
}),
|
||||
{}
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
/**
|
||||
* Loads dynamix config from disk with TTL caching.
|
||||
*
|
||||
* @param configPaths - Array of config file paths to load and merge
|
||||
* @returns Merged config object from all valid paths
|
||||
*/
|
||||
export const loadDynamixConfigFromDiskSync = (
|
||||
configPaths: readonly (string | undefined | null)[]
|
||||
): RecursivePartial<DynamixConfig> => {
|
||||
return memoizedConfigLoader.get(configPaths);
|
||||
};
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
import { configureStore } from '@reduxjs/toolkit';
|
||||
|
||||
import { logger } from '@app/core/log.js';
|
||||
import { loadDynamixConfigFromDiskSync } from '@app/store/actions/load-dynamix-config-file.js';
|
||||
import { listenerMiddleware } from '@app/store/listeners/listener-middleware.js';
|
||||
import { updateDynamixConfig } from '@app/store/modules/dynamix.js';
|
||||
import { rootReducer } from '@app/store/root-reducer.js';
|
||||
import { FileLoadStatus } from '@app/store/types.js';
|
||||
|
||||
export const store = configureStore({
|
||||
reducer: rootReducer,
|
||||
@@ -15,8 +19,36 @@ export type RootState = ReturnType<typeof store.getState>;
|
||||
export type AppDispatch = typeof store.dispatch;
|
||||
export type ApiStore = typeof store;
|
||||
|
||||
// loadDynamixConfig is located here and not in the actions/load-dynamix-config-file.js file because it needs to access the store,
|
||||
// and injecting it seemed circular and convoluted for this use case.
|
||||
/**
|
||||
* Loads the dynamix config into the store.
|
||||
* Can be called multiple times - uses TTL caching internally.
|
||||
* @returns The loaded dynamix config.
|
||||
*/
|
||||
export const loadDynamixConfig = () => {
|
||||
const configPaths = store.getState().paths['dynamix-config'] ?? [];
|
||||
try {
|
||||
const config = loadDynamixConfigFromDiskSync(configPaths);
|
||||
store.dispatch(
|
||||
updateDynamixConfig({
|
||||
...config,
|
||||
status: FileLoadStatus.LOADED,
|
||||
})
|
||||
);
|
||||
} catch (error) {
|
||||
logger.error(error, 'Failed to load dynamix config from disk');
|
||||
store.dispatch(
|
||||
updateDynamixConfig({
|
||||
status: FileLoadStatus.FAILED_LOADING,
|
||||
})
|
||||
);
|
||||
}
|
||||
return store.getState().dynamix;
|
||||
};
|
||||
|
||||
export const getters = {
|
||||
dynamix: () => store.getState().dynamix,
|
||||
dynamix: () => loadDynamixConfig(),
|
||||
emhttp: () => store.getState().emhttp,
|
||||
paths: () => store.getState().paths,
|
||||
registration: () => store.getState().registration,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user