mirror of https://github.com/unraid/api.git (synced 2026-01-02 14:40:01 -06:00)

Compare commits: v4.25.0...refactor/r (147 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 4ee5506266 | |
| | 86ec429079 | |
| | bff05e0c5d | |
| | 18f3227d9e | |
| | 9504b4a8c5 | |
| | b0ff85b2af | |
| | 1c4f33e4fb | |
| | 00f015f1b4 | |
| | 443335ed8e | |
| | ad2a5cc443 | |
| | 9ed71e5df9 | |
| | 8ef00ce27f | |
| | 716789fa00 | |
| | b456678fd8 | |
| | 902307ae55 | |
| | 2f88228937 | |
| | 5628bf9a0e | |
| | 3c9c04d684 | |
| | a8578bf79b | |
| | 88e76d7fa2 | |
| | 1e5deccb70 | |
| | 4c49bafe15 | |
| | 17f0176857 | |
| | 4abc79a7b5 | |
| | e817d6a317 | |
| | c2d2fbea40 | |
| | ad78a0251a | |
| | c18de73b29 | |
| | e61657c05b | |
| | 7969e44211 | |
| | fd1a04641a | |
| | d10e0540eb | |
| | 6458457a7f | |
| | 53ee465e92 | |
| | 4c42b4b810 | |
| | 4a19d4d536 | |
| | ddd6b0b54f | |
| | 2122e6d8a2 | |
| | e3bf57184b | |
| | 6a04a06a72 | |
| | 875aa0972a | |
| | f7fe9597ef | |
| | f690c23a20 | |
| | fe74e53cf5 | |
| | be1e2f5ee2 | |
| | 67ac30787f | |
| | c21ecc7cb7 | |
| | 1000976296 | |
| | 9f27eb787f | |
| | 80bbf2dd6b | |
| | 4798c89a5f | |
| | 88766adeea | |
| | e80ea795fe | |
| | 7679d71c3e | |
| | a8459c7431 | |
| | 29312d78e6 | |
| | a32dd2182a | |
| | d07aa42063 | |
| | 9ef1cf1eca | |
| | a0745e15ca | |
| | c39b0b267c | |
| | 73135b8328 | |
| | e42d619b6d | |
| | 560db880cc | |
| | d6055f102b | |
| | d099e7521d | |
| | bb9b539732 | |
| | 0e44e73bf7 | |
| | 277ac42046 | |
| | e1e3ea7eb6 | |
| | 8b155d1f1c | |
| | d13a1f6174 | |
| | e243ae836e | |
| | 01a63fd86b | |
| | df78608457 | |
| | ca3bee4ad5 | |
| | 024ae69343 | |
| | 99ce88bfdc | |
| | 73b2ce360c | |
| | d6e29395c8 | |
| | 317e0fa307 | |
| | 331c913329 | |
| | abf3461348 | |
| | 079a09ec90 | |
| | e4223ab5a1 | |
| | 6f54206a4a | |
| | e35bcc72f1 | |
| | 74df938e45 | |
| | 51f025b105 | |
| | 23a71207dd | |
| | 832e9d04f2 | |
| | 31af99e52f | |
| | 933cefa020 | |
| | 375dcd0598 | |
| | 64875edbba | |
| | 330e81a484 | |
| | b8f0fdf8d2 | |
| | 36c104915e | |
| | dc9a036c73 | |
| | c71b0487ad | |
| | e7340431a5 | |
| | e4a9b8291b | |
| | 6b6b78fa2e | |
| | e2fdf6cadb | |
| | 3d4f193fa4 | |
| | b28ef1ea33 | |
| | ee0f240233 | |
| | 3aacaa1fb5 | |
| | 0cd4c0ae16 | |
| | 66625ded6a | |
| | f8a6785e9c | |
| | d7aca81c60 | |
| | 854b403fbd | |
| | c264a1843c | |
| | 45cda4af80 | |
| | 64eb9ce9b5 | |
| | d56797c59f | |
| | 92af3b6115 | |
| | 35f8bc2258 | |
| | c4cd0c6352 | |
| | 818e7ce997 | |
| | 7e13202aa1 | |
| | d18eaf2364 | |
| | 42406e795d | |
| | 11d2de5d08 | |
| | 031c1ab5dc | |
| | 34075e44c5 | |
| | ff2906e52a | |
| | a0d6cc92c8 | |
| | 57acfaacf0 | |
| | ea816c7a5c | |
| | cafde72d38 | |
| | 2b481c397c | |
| | 8c4e9dd7ae | |
| | f212dce88b | |
| | 8cd2a4c124 | |
| | 10f048ee1f | |
| | e9e271ade5 | |
| | 31c41027fc | |
| | fabe6a2c4b | |
| | 754966d5d3 | |
| | ed594e9147 | |
| | 50d83313a1 | |
| | e57ec00627 | |
| | 84f4a7221d | |
| | d73953f8ff | |
| | 0d165a6087 | |
```diff
@@ -241,4 +241,3 @@ const pinia = createTestingPinia({
 - Set initial state for focused testing
 - Test computed properties by accessing them directly
 - Verify state changes by updating the store
```
.github/workflows/build-artifacts.yml (vendored, new file, 208 lines) @@ -0,0 +1,208 @@

```yaml
name: Build Artifacts

on:
  workflow_call:
    inputs:
      ref:
        type: string
        required: false
        description: "Git ref to checkout (commit SHA, branch, or tag)"
      version_override:
        type: string
        required: false
        description: "Override version (for manual releases)"
    outputs:
      build_number:
        description: "Build number for the artifacts"
        value: ${{ jobs.build-api.outputs.build_number }}
    secrets:
      VITE_ACCOUNT:
        required: true
      VITE_CONNECT:
        required: true
      VITE_UNRAID_NET:
        required: true
      VITE_CALLBACK_KEY:
        required: true
      UNRAID_BOT_GITHUB_ADMIN_TOKEN:
        required: false

jobs:
  build-api:
    name: Build API
    runs-on: ubuntu-latest
    outputs:
      build_number: ${{ steps.buildnumber.outputs.build_number || steps.fallback_buildnumber.outputs.build_number }}
    defaults:
      run:
        working-directory: api
    steps:
      - name: Checkout repo
        uses: actions/checkout@v6
        with:
          ref: ${{ inputs.ref || github.ref }}
          fetch-depth: 0

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Install Node
        uses: actions/setup-node@v6
        with:
          node-version-file: ".nvmrc"
          cache: 'pnpm'

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0

      - name: PNPM Install
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile

      - name: Get Git Short Sha and API version
        id: vars
        run: |
          GIT_SHA=$(git rev-parse --short HEAD)
          IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
          PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
          API_VERSION=${{ inputs.version_override && format('"{0}"', inputs.version_override) || '${PACKAGE_LOCK_VERSION}' }}
          if [ -z "${{ inputs.version_override }}" ] && [ -z "$IS_TAGGED" ]; then
            API_VERSION="${PACKAGE_LOCK_VERSION}+${GIT_SHA}"
          fi
          export API_VERSION
          echo "API_VERSION=${API_VERSION}" >> $GITHUB_ENV
          echo "PACKAGE_LOCK_VERSION=${PACKAGE_LOCK_VERSION}" >> $GITHUB_OUTPUT

      - name: Generate build number
        id: buildnumber
        if: github.repository == 'unraid/api'
        continue-on-error: true
        uses: onyxmueller/build-tag-number@v1
        with:
          token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN || github.token }}
          prefix: ${{ inputs.version_override || steps.vars.outputs.PACKAGE_LOCK_VERSION }}

      - name: Generate fallback build number
        id: fallback_buildnumber
        if: steps.buildnumber.outcome != 'success'
        run: echo "build_number=${GITHUB_RUN_NUMBER}" >> $GITHUB_OUTPUT

      - name: Build
        run: |
          pnpm run build:release
          tar -czf deploy/unraid-api.tgz -C deploy/pack/ .

      - name: Upload tgz to Github artifacts
        uses: actions/upload-artifact@v6
        with:
          name: unraid-api
          path: ${{ github.workspace }}/api/deploy/unraid-api.tgz

  build-unraid-ui-webcomponents:
    name: Build Unraid UI Library (Webcomponent Version)
    defaults:
      run:
        working-directory: unraid-ui
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v6
        with:
          ref: ${{ inputs.ref || github.ref }}

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Install Node
        uses: actions/setup-node@v6
        with:
          node-version-file: ".nvmrc"
          cache: 'pnpm'

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0

      - name: Install dependencies
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile --filter @unraid/ui

      - name: Lint
        run: pnpm run lint

      - name: Build
        run: pnpm run build:wc

      - name: Upload Artifact to Github
        uses: actions/upload-artifact@v6
        with:
          name: unraid-wc-ui
          path: unraid-ui/dist-wc/

  build-web:
    name: Build Web App
    defaults:
      run:
        working-directory: web
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v6
        with:
          ref: ${{ inputs.ref || github.ref }}

      - name: Create env file
        run: |
          touch .env
          echo VITE_ACCOUNT=${{ secrets.VITE_ACCOUNT }} >> .env
          echo VITE_CONNECT=${{ secrets.VITE_CONNECT }} >> .env
          echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
          echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Install Node
        uses: actions/setup-node@v6
        with:
          node-version-file: ".nvmrc"
          cache: 'pnpm'

      - name: PNPM Install
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile --filter @unraid/web --filter @unraid/ui

      - name: Build Unraid UI
        run: |
          cd ${{ github.workspace }}/unraid-ui
          pnpm run build

      - name: Lint files
        run: pnpm run lint

      - name: Type Check
        run: pnpm run type-check

      - name: Build
        run: pnpm run build

      - name: Upload build to Github artifacts
        uses: actions/upload-artifact@v6
        with:
          name: unraid-wc-rich
          path: web/dist
```
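The version logic in the `Get Git Short Sha and API version` step above is easier to follow outside the workflow expression syntax. Here is a minimal local sketch of the same derivation, assuming a checkout with a `package.json`; the `VERSION_OVERRIDE` argument stands in for the workflow's `inputs.version_override`:

```bash
#!/usr/bin/env bash
# Sketch of the API_VERSION derivation: an explicit override wins, a tagged
# checkout uses the clean package.json version, and anything else appends
# "+<short-sha>" as build metadata.
set -euo pipefail

VERSION_OVERRIDE="${1:-}"   # stand-in for inputs.version_override
GIT_SHA=$(git rev-parse --short HEAD)
IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match 2>/dev/null || echo '')
PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)

if [ -n "$VERSION_OVERRIDE" ]; then
  API_VERSION="$VERSION_OVERRIDE"
elif [ -n "$IS_TAGGED" ]; then
  API_VERSION="$PACKAGE_LOCK_VERSION"
else
  API_VERSION="${PACKAGE_LOCK_VERSION}+${GIT_SHA}"
fi

echo "API_VERSION=${API_VERSION}"
```

On an untagged checkout of version 4.29.2 at a hypothetical commit abc1234 this prints `API_VERSION=4.29.2+abc1234`, matching the `+<sha>` versions the workflow produces for non-release builds.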
.github/workflows/build-plugin.yml (vendored, 40 changed lines)

```diff
@@ -27,6 +27,15 @@ on:
       type: string
       required: true
       description: "Build number for the plugin builds"
+    ref:
+      type: string
+      required: false
+      description: "Git ref (commit SHA, branch, or tag) to checkout"
+    TRIGGER_PRODUCTION_RELEASE:
+      type: boolean
+      required: false
+      default: false
+      description: "Whether to automatically trigger the release-production workflow (default: false)"
   secrets:
     CF_ACCESS_KEY_ID:
       required: true
@@ -47,8 +56,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
+          ref: ${{ inputs.ref }}
           fetch-depth: 0

       - uses: pnpm/action-setup@v4
@@ -57,7 +67,7 @@ jobs:
         run_install: false

       - name: Install Node
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version-file: ".nvmrc"
           cache: 'pnpm'
@@ -68,7 +78,21 @@ jobs:
           GIT_SHA=$(git rev-parse --short HEAD)
           IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
           PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
-          API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
+
+          # For release builds, trust the release tag version to avoid stale checkouts
+          if [ "${{ inputs.RELEASE_CREATED }}" = "true" ] && [ -n "${{ inputs.RELEASE_TAG }}" ]; then
+            TAG_VERSION="${{ inputs.RELEASE_TAG }}"
+            TAG_VERSION="${TAG_VERSION#v}" # trim leading v if present
+
+            if [ "$TAG_VERSION" != "$PACKAGE_LOCK_VERSION" ]; then
+              echo "::warning::Release tag version ($TAG_VERSION) does not match package.json version ($PACKAGE_LOCK_VERSION). Using tag version for TXZ naming."
+            fi
+
+            API_VERSION="$TAG_VERSION"
+          else
+            API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
+          fi
+
           echo "API_VERSION=${API_VERSION}" >> $GITHUB_OUTPUT

       - name: Install dependencies
@@ -77,19 +101,19 @@ jobs:
           pnpm install --frozen-lockfile --filter @unraid/connect-plugin

       - name: Download Unraid UI Components
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@v7
         with:
           name: unraid-wc-ui
           path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/uui
           merge-multiple: true
       - name: Download Unraid Web Components
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@v7
         with:
           pattern: unraid-wc-rich
           path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/standalone
           merge-multiple: true
       - name: Download Unraid API
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@v7
         with:
           name: unraid-api
           path: ${{ github.workspace }}/plugin/api/
@@ -118,7 +142,7 @@ jobs:
           fi

       - name: Upload to GHA
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v6
         with:
           name: unraid-plugin-${{ github.run_id }}-${{ inputs.RELEASE_TAG }}
           path: plugin/deploy/
@@ -136,7 +160,7 @@ jobs:
           done

       - name: Workflow Dispatch and wait
-        if: inputs.RELEASE_CREATED == 'true'
+        if: inputs.RELEASE_CREATED == 'true' && inputs.TRIGGER_PRODUCTION_RELEASE == true
         uses: the-actions-org/workflow-dispatch@v4.0.0
         with:
           workflow: release-production.yml
```
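The new release-tag handling in the version step is the substantive change in this diff: it trusts the release tag over a possibly stale `package.json`. A standalone sketch of that branch, with `RELEASE_TAG` as a stand-in for the workflow's `inputs.RELEASE_TAG`:

```bash
#!/usr/bin/env bash
# Sketch of the release-tag branch added above: strip an optional leading "v",
# warn on a mismatch with package.json, and use the tag version regardless.
set -euo pipefail

RELEASE_TAG="${1:-v4.29.2}"   # stand-in for inputs.RELEASE_TAG
PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)

TAG_VERSION="${RELEASE_TAG#v}"   # "v4.29.2" -> "4.29.2"; no-op without the prefix

if [ "$TAG_VERSION" != "$PACKAGE_LOCK_VERSION" ]; then
  # "::warning::" is the GitHub Actions annotation syntax; a plain echo elsewhere.
  echo "::warning::Release tag version ($TAG_VERSION) does not match package.json version ($PACKAGE_LOCK_VERSION)."
fi

echo "API_VERSION=${TAG_VERSION}"
```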
.github/workflows/claude-code-review.yml (vendored, deleted, 103 lines) @@ -1,103 +0,0 @@

```yaml
name: Claude Code Review

on:
  pull_request:
    types: [opened, synchronize]
    # Skip reviews for non-code changes
    paths-ignore:
      - "**/*.md"
      - "**/package-lock.json"
      - "**/pnpm-lock.yaml"
      - "**/.gitignore"
      - "**/LICENSE"
      - "**/*.config.js"
      - "**/*.config.ts"
      - "**/tsconfig.json"
      - "**/.github/workflows/*.yml"
      - "**/docs/**"

jobs:
  claude-review:
    # Skip review for bot PRs and WIP/skip-review PRs
    # Only run if changes are significant (>10 lines)
    if: |
      (github.event.pull_request.additions > 10 || github.event.pull_request.deletions > 10) &&
      !contains(github.event.pull_request.title, '[skip-review]') &&
      !contains(github.event.pull_request.title, '[WIP]') &&
      !endsWith(github.event.pull_request.user.login, '[bot]') &&
      github.event.pull_request.user.login != 'dependabot' &&
      github.event.pull_request.user.login != 'renovate'

    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: read
      issues: read
      id-token: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v5
        with:
          fetch-depth: 1

      - name: Run Claude Code Review
        id: claude-review
        uses: anthropics/claude-code-action@beta
        with:
          claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}

          # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4)
          # model: "claude-opus-4-20250514"

          # Direct prompt for automated review (no @claude mention needed)
          direct_prompt: |
            IMPORTANT: Review ONLY the DIFF/CHANGESET - the actual lines that were added or modified in this PR.
            DO NOT review the entire file context, only analyze the specific changes being made.

            Look for HIGH-PRIORITY issues in the CHANGED LINES ONLY:

            1. CRITICAL BUGS: Logic errors, null pointer issues, infinite loops, race conditions
            2. SECURITY: SQL injection, XSS, authentication bypass, exposed secrets, unsafe operations
            3. BREAKING CHANGES: API contract violations, removed exports, changed function signatures
            4. DATA LOSS RISKS: Destructive operations without safeguards, missing data validation

            DO NOT comment on:
            - Code that wasn't changed in this PR
            - Style, formatting, or documentation
            - Test coverage (unless tests are broken by the changes)
            - Minor optimizations or best practices
            - Existing code issues that weren't introduced by this PR

            If you find no critical issues in the DIFF, respond with: "✅ No critical issues found in changes"

            Keep response under 10 lines. Reference specific line numbers from the diff when reporting issues.

          # Optional: Use sticky comments to make Claude reuse the same comment on subsequent pushes to the same PR
          use_sticky_comment: true

          # Context-aware review based on PR characteristics
          # Uncomment to enable different review strategies based on context
          # direct_prompt: |
          #   ${{
          #     (github.event.pull_request.additions > 500) &&
          #     'Large PR detected. Focus only on architectural issues and breaking changes. Skip minor issues.' ||
          #     contains(github.event.pull_request.title, 'fix') &&
          #     'Bug fix PR: Verify the fix addresses the root cause and check for regression risks.' ||
          #     contains(github.event.pull_request.title, 'deps') &&
          #     'Dependency update: Check for breaking changes and security advisories only.' ||
          #     contains(github.event.pull_request.title, 'refactor') &&
          #     'Refactor PR: Verify no behavior changes and check for performance regressions.' ||
          #     contains(github.event.pull_request.title, 'feat') &&
          #     'New feature: Check for security issues, edge cases, and integration problems only.' ||
          #     'Standard review: Check for critical bugs, security issues, and breaking changes only.'
          #   }}

          # Optional: Add specific tools for running tests or linting
          # allowed_tools: "Bash(npm run test),Bash(npm run lint),Bash(npm run typecheck)"

          # Optional: Skip review for certain conditions
          # if: |
          #   !contains(github.event.pull_request.title, '[skip-review]') &&
          #   !contains(github.event.pull_request.title, '[WIP]')
```
.github/workflows/codeql-analysis.yml (vendored, 8 changed lines)

```diff
@@ -24,17 +24,17 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3
+        uses: github/codeql-action/init@v4
         with:
           languages: ${{ matrix.language }}
           config-file: ./.github/codeql/codeql-config.yml
           queries: +security-and-quality

       - name: Autobuild
-        uses: github/codeql-action/autobuild@v3
+        uses: github/codeql-action/autobuild@v4

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3
+        uses: github/codeql-action/analyze@v4
```
.github/workflows/deploy-storybook.yml (vendored, 4 changed lines)

```diff
@@ -20,7 +20,7 @@ jobs:
     name: Deploy Storybook
     steps:
       - name: Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

       - uses: pnpm/action-setup@v4
         name: Install pnpm
@@ -28,7 +28,7 @@ jobs:
         run_install: false

       - name: Setup Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version-file: ".nvmrc"
           cache: 'pnpm'
```
.github/workflows/generate-release-notes.yml (vendored, new file, 210 lines) @@ -0,0 +1,210 @@

```yaml
name: Generate Release Notes

on:
  workflow_call:
    inputs:
      version:
        description: 'Version number (e.g., 4.25.3)'
        required: true
        type: string
      target_commitish:
        description: 'Commit SHA or branch (leave empty for current HEAD)'
        required: false
        type: string
      release_notes:
        description: 'Custom release notes (leave empty to auto-generate)'
        required: false
        type: string
    outputs:
      release_notes:
        description: 'Generated or provided release notes'
        value: ${{ jobs.generate.outputs.release_notes }}
    secrets:
      UNRAID_BOT_GITHUB_ADMIN_TOKEN:
        required: true

jobs:
  generate:
    name: Generate Release Notes
    runs-on: ubuntu-latest
    outputs:
      release_notes: ${{ steps.generate_notes.outputs.release_notes }}
    steps:
      - name: Checkout repo
        uses: actions/checkout@v6
        with:
          ref: ${{ inputs.target_commitish || github.ref }}
          fetch-depth: 0
          token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

      - name: Setup Node.js
        uses: actions/setup-node@v6
        with:
          node-version: '20'

      - name: Generate Release Notes
        id: generate_notes
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          TAG_NAME="v${{ inputs.version }}"
          VERSION="${{ inputs.version }}"

          if [ -n "${{ inputs.release_notes }}" ]; then
            NOTES="${{ inputs.release_notes }}"
          else
            CHANGELOG_PATH="api/CHANGELOG.md"

            if [ -f "$CHANGELOG_PATH" ]; then
              echo "Extracting release notes from CHANGELOG.md for version ${VERSION}"

              NOTES=$(awk -v ver="$VERSION" '
                BEGIN {
                  found=0; capture=0; output="";
                  gsub(/\./, "\\.", ver);
                }
                /^## \[/ {
                  if (capture) exit;
                  if ($0 ~ "\\[" ver "\\]") {
                    found=1;
                    capture=1;
                  }
                }
                capture {
                  if (output != "") output = output "\n";
                  output = output $0;
                }
                END {
                  if (found) print output;
                  else exit 1;
                }
              ' "$CHANGELOG_PATH") || EXTRACTION_STATUS=$?

              if [ ${EXTRACTION_STATUS:-0} -eq 0 ] && [ -n "$NOTES" ]; then
                echo "✓ Found release notes in CHANGELOG.md"
              else
                echo "⚠ Version ${VERSION} not found in CHANGELOG.md, generating with conventional-changelog"

                PREV_TAG=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")
                CHANGELOG_GENERATED=false

                if [ -n "$PREV_TAG" ]; then
                  echo "Generating changelog from ${PREV_TAG}..HEAD using conventional-changelog"

                  npm install -g conventional-changelog-cli

                  TEMP_NOTES=$(mktemp)
                  conventional-changelog -p conventionalcommits \
                    --release-count 1 \
                    --output-unreleased \
                    > "$TEMP_NOTES" 2>/dev/null || true

                  if [ -s "$TEMP_NOTES" ]; then
                    NOTES=$(cat "$TEMP_NOTES")

                    if [ -n "$NOTES" ]; then
                      echo "✓ Generated changelog with conventional-changelog"
                      CHANGELOG_GENERATED=true

                      TEMP_CHANGELOG=$(mktemp)
                      {
                        if [ -f "$CHANGELOG_PATH" ]; then
                          head -n 1 "$CHANGELOG_PATH"
                          echo ""
                          echo "$NOTES"
                          echo ""
                          tail -n +2 "$CHANGELOG_PATH"
                        else
                          echo "# Changelog"
                          echo ""
                          echo "$NOTES"
                        fi
                      } > "$TEMP_CHANGELOG"

                      mv "$TEMP_CHANGELOG" "$CHANGELOG_PATH"
                      echo "✓ Updated CHANGELOG.md with generated notes"
                    else
                      echo "⚠ conventional-changelog produced empty output, using GitHub auto-generation"
                      NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
                        -f tag_name="${TAG_NAME}" \
                        -f target_commitish="${{ inputs.target_commitish || github.sha }}" \
                        -f previous_tag_name="${PREV_TAG}" \
                        --jq '.body')
                    fi
                  else
                    echo "⚠ conventional-changelog failed, using GitHub auto-generation"
                    NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
                      -f tag_name="${TAG_NAME}" \
                      -f target_commitish="${{ inputs.target_commitish || github.sha }}" \
                      -f previous_tag_name="${PREV_TAG}" \
                      --jq '.body')
                  fi

                  rm -f "$TEMP_NOTES"
                else
                  echo "⚠ No previous tag found, using GitHub auto-generation"
                  NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
                    -f tag_name="${TAG_NAME}" \
                    -f target_commitish="${{ inputs.target_commitish || github.sha }}" \
                    --jq '.body' || echo "Release ${VERSION}")
                fi

                if [ "$CHANGELOG_GENERATED" = true ]; then
                  BRANCH_OR_SHA="${{ inputs.target_commitish || github.ref }}"

                  if git show-ref --verify --quiet "refs/heads/${BRANCH_OR_SHA}"; then
                    echo ""
                    echo "=========================================="
                    echo "CHANGELOG GENERATED AND COMMITTED"
                    echo "=========================================="
                    echo ""

                    git config user.name "github-actions[bot]"
                    git config user.email "github-actions[bot]@users.noreply.github.com"

                    BEFORE_SHA=$(git rev-parse HEAD)

                    git add "$CHANGELOG_PATH"
                    git commit -m "chore: add changelog for version ${VERSION}"
                    git push origin "HEAD:${BRANCH_OR_SHA}"

                    AFTER_SHA=$(git rev-parse HEAD)

                    echo "✓ Changelog committed and pushed successfully"
                    echo ""
                    echo "Previous SHA: ${BEFORE_SHA}"
                    echo "New SHA: ${AFTER_SHA}"
                    echo ""
                    echo "⚠️ CRITICAL: A new commit was created, but github.sha is immutable."
                    echo "⚠️ github.sha = ${BEFORE_SHA} (original workflow trigger)"
                    echo "⚠️ The release tag must point to ${AFTER_SHA} (with changelog)"
                    echo ""
                    echo "Re-run this workflow to create the release with the correct commit."
                    echo ""
                    exit 1
                  else
                    echo "⚠ Target is a commit SHA, not a branch. Cannot push changelog updates."
                    echo "Changelog was generated but not committed."
                  fi
                fi
              fi
            else
              echo "⚠ CHANGELOG.md not found, using GitHub auto-generation"
              PREV_TAG=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")

              if [ -n "$PREV_TAG" ]; then
                NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
                  -f tag_name="${TAG_NAME}" \
                  -f target_commitish="${{ inputs.target_commitish || github.sha }}" \
                  -f previous_tag_name="${PREV_TAG}" \
                  --jq '.body')
              else
                NOTES="Release ${VERSION}"
              fi
            fi
          fi

          echo "release_notes<<EOF" >> $GITHUB_OUTPUT
          echo "$NOTES" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT
```
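The awk program in this workflow does the heavy lifting: it captures everything from the `## [<version>]` heading up to, but not including, the next `## [` heading. A self-contained sketch of the same extraction against a made-up sample changelog (the sample path, URLs, and version are illustrative):

```bash
#!/usr/bin/env bash
# Sketch of the CHANGELOG section extraction used above, run against sample data.
set -euo pipefail

VERSION="4.29.1"
SAMPLE=$(mktemp)
cat > "$SAMPLE" <<'EOF'
# Changelog

## [4.29.2](https://example.invalid) (2025-12-19)

### Bug Fixes

* fix A

## [4.29.1](https://example.invalid) (2025-12-19)

### Bug Fixes

* fix B
EOF

awk -v ver="$VERSION" '
  BEGIN { gsub(/\./, "\\.", ver) }       # escape dots so the version matches literally
  /^## \[/ {
    if (capture) exit                    # the next release heading ends the section
    if ($0 ~ "\\[" ver "\\]") capture = 1
  }
  capture { print }
' "$SAMPLE"
# Prints the "## [4.29.1] ..." heading plus its "fix B" entry, nothing else.
```

The `release_notes<<EOF` heredoc at the end of the workflow step is the standard GitHub Actions pattern for passing a multi-line value like this through `$GITHUB_OUTPUT`.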
.github/workflows/main.yml (vendored, 200 changed lines)

```diff
@@ -23,7 +23,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           fetch-depth: 0

@@ -33,7 +33,7 @@ jobs:
         run_install: false

       - name: Install Node
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version-file: ".nvmrc"
           cache: 'pnpm'
@@ -154,170 +154,15 @@ jobs:
           files: ./coverage/coverage-final.json,../web/coverage/coverage-final.json,../unraid-ui/coverage/coverage-final.json,../packages/unraid-api-plugin-connect/coverage/coverage-final.json,../packages/unraid-shared/coverage/coverage-final.json
           fail_ci_if_error: false

-  build-api:
-    name: Build API
-    runs-on: ubuntu-latest
-    outputs:
-      build_number: ${{ steps.buildnumber.outputs.build_number }}
-    defaults:
-      run:
-        working-directory: api
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@v5
-
-      - uses: pnpm/action-setup@v4
-        name: Install pnpm
-        with:
-          run_install: false
-
-      - name: Install Node
-        uses: actions/setup-node@v5
-        with:
-          node-version-file: ".nvmrc"
-          cache: 'pnpm'
-
-      - name: Cache APT Packages
-        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
-        with:
-          packages: bash procps python3 libvirt-dev jq zstd git build-essential
-          version: 1.0
-
-      - name: PNPM Install
-        run: |
-          cd ${{ github.workspace }}
-          pnpm install --frozen-lockfile
-
-      - name: Build
-        run: pnpm run build
-
-      - name: Get Git Short Sha and API version
-        id: vars
-        run: |
-          GIT_SHA=$(git rev-parse --short HEAD)
-          IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
-          PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
-          API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
-          export API_VERSION
-          echo "API_VERSION=${API_VERSION}" >> $GITHUB_ENV
-          echo "PACKAGE_LOCK_VERSION=${PACKAGE_LOCK_VERSION}" >> $GITHUB_OUTPUT
-
-      - name: Generate build number
-        id: buildnumber
-        uses: onyxmueller/build-tag-number@v1
-        with:
-          token: ${{secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN}}
-          prefix: ${{steps.vars.outputs.PACKAGE_LOCK_VERSION}}
-
-      - name: Build
-        run: |
-          pnpm run build:release
-          tar -czf deploy/unraid-api.tgz -C deploy/pack/ .
-
-      - name: Upload tgz to Github artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          name: unraid-api
-          path: ${{ github.workspace }}/api/deploy/unraid-api.tgz
-
-  build-unraid-ui-webcomponents:
-    name: Build Unraid UI Library (Webcomponent Version)
-    defaults:
-      run:
-        working-directory: unraid-ui
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@v5
-
-      - uses: pnpm/action-setup@v4
-        name: Install pnpm
-        with:
-          run_install: false
-
-      - name: Install Node
-        uses: actions/setup-node@v5
-        with:
-          node-version-file: ".nvmrc"
-          cache: 'pnpm'
-
-      - name: Cache APT Packages
-        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
-        with:
-          packages: bash procps python3 libvirt-dev jq zstd git build-essential
-          version: 1.0
-
-      - name: Install dependencies
-        run: |
-          cd ${{ github.workspace }}
-          pnpm install --frozen-lockfile --filter @unraid/ui
-
-      - name: Lint
-        run: pnpm run lint
-
-      - name: Build
-        run: pnpm run build:wc
-
-      - name: Upload Artifact to Github
-        uses: actions/upload-artifact@v4
-        with:
-          name: unraid-wc-ui
-          path: unraid-ui/dist-wc/
-
-  build-web:
-    # needs: [build-unraid-ui]
-    name: Build Web App
-    defaults:
-      run:
-        working-directory: web
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@v5
-
-      - name: Create env file
-        run: |
-          touch .env
-          echo VITE_ACCOUNT=${{ secrets.VITE_ACCOUNT }} >> .env
-          echo VITE_CONNECT=${{ secrets.VITE_CONNECT }} >> .env
-          echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
-          echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env
-
-      - uses: pnpm/action-setup@v4
-        name: Install pnpm
-        with:
-          run_install: false
-
-      - name: Install Node
-        uses: actions/setup-node@v5
-        with:
-          node-version-file: ".nvmrc"
-          cache: 'pnpm'
-
-      - name: PNPM Install
-        run: |
-          cd ${{ github.workspace }}
-          pnpm install --frozen-lockfile --filter @unraid/web --filter @unraid/ui
-
-      - name: Build Unraid UI
-        run: |
-          cd ${{ github.workspace }}/unraid-ui
-          pnpm run build
-
-      - name: Lint files
-        run: pnpm run lint
-
-      - name: Type Check
-        run: pnpm run type-check
-
-      - name: Build
-        run: pnpm run build
-
-      - name: Upload build to Github artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          name: unraid-wc-rich
-          path: web/dist
+  build-artifacts:
+    name: Build All Artifacts
+    uses: ./.github/workflows/build-artifacts.yml
+    secrets:
+      VITE_ACCOUNT: ${{ secrets.VITE_ACCOUNT }}
+      VITE_CONNECT: ${{ secrets.VITE_CONNECT }}
+      VITE_UNRAID_NET: ${{ secrets.VITE_UNRAID_NET }}
+      VITE_CALLBACK_KEY: ${{ secrets.VITE_CALLBACK_KEY }}
+      UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

   release-please:
     name: Release Please
@@ -326,15 +171,15 @@ jobs:
     if: github.event_name == 'push' && github.ref == 'refs/heads/main'
     needs:
       - test-api
-      - build-api
-      - build-web
-      - build-unraid-ui-webcomponents
+      - build-artifacts
     permissions:
       contents: write
       pull-requests: write
     steps:
      - name: Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           fetch-depth: 0

       - id: release
         uses: googleapis/release-please-action@v4
@@ -345,17 +190,15 @@ jobs:
   build-plugin-staging-pr:
     name: Build and Deploy Plugin
     needs:
-      - build-api
-      - build-web
-      - build-unraid-ui-webcomponents
+      - build-artifacts
       - test-api
     uses: ./.github/workflows/build-plugin.yml
     with:
-      RELEASE_CREATED: false
+      RELEASE_CREATED: 'false'
       TAG: ${{ github.event.pull_request.number && format('PR{0}', github.event.pull_request.number) || '' }}
       BUCKET_PATH: ${{ github.event.pull_request.number && format('unraid-api/tag/PR{0}', github.event.pull_request.number) || 'unraid-api' }}
       BASE_URL: "https://preview.dl.unraid.net/unraid-api"
-      BUILD_NUMBER: ${{ needs.build-api.outputs.build_number }}
+      BUILD_NUMBER: ${{ needs.build-artifacts.outputs.build_number }}
     secrets:
       CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
       CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
@@ -367,15 +210,16 @@ jobs:
     name: Build and Deploy Production Plugin
     needs:
       - release-please
-      - build-api
+      - build-artifacts
     uses: ./.github/workflows/build-plugin.yml
     with:
-      RELEASE_CREATED: true
+      RELEASE_CREATED: 'true'
       RELEASE_TAG: ${{ needs.release-please.outputs.tag_name }}
       TAG: ""
       BUCKET_PATH: unraid-api
       BASE_URL: "https://stable.dl.unraid.net/unraid-api"
-      BUILD_NUMBER: ${{ needs.build-api.outputs.build_number }}
+      BUILD_NUMBER: ${{ needs.build-artifacts.outputs.build_number }}
+      TRIGGER_PRODUCTION_RELEASE: true
     secrets:
       CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
       CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
```
.github/workflows/manual-release.yml (vendored, new file, 239 lines) @@ -0,0 +1,239 @@

```yaml
name: Manual Release

on:
  workflow_dispatch:
    inputs:
      version:
        description: 'Version to release (e.g., 4.25.3)'
        required: true
        type: string
      target_commitish:
        description: 'Commit SHA or branch (leave empty for current HEAD)'
        required: false
        type: string
      release_notes:
        description: 'Release notes/changelog (leave empty to auto-generate from commits)'
        required: false
        type: string
      prerelease:
        description: 'Mark as prerelease'
        required: false
        type: boolean
        default: false

permissions:
  contents: write
  pull-requests: write

jobs:
  validate-version:
    name: Validate and Update Package Versions
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v6
        with:
          ref: ${{ inputs.target_commitish || github.ref }}
          fetch-depth: 0
          token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

      - name: Setup Node.js
        uses: actions/setup-node@v6
        with:
          node-version: '20'

      - name: Check and Update Package Versions
        run: |
          EXPECTED_VERSION="${{ inputs.version }}"
          MISMATCHES_FOUND=false

          PACKAGE_JSONS=(
            "package.json"
            "api/package.json"
            "web/package.json"
            "unraid-ui/package.json"
            "plugin/package.json"
            "packages/unraid-shared/package.json"
            "packages/unraid-api-plugin-health/package.json"
            "packages/unraid-api-plugin-generator/package.json"
            "packages/unraid-api-plugin-connect/package.json"
          )

          echo "Checking package.json versions against expected version: ${EXPECTED_VERSION}"

          for pkg in "${PACKAGE_JSONS[@]}"; do
            if [ -f "$pkg" ]; then
              CURRENT_VERSION=$(node -p "require('./$pkg').version")
              if [ "$CURRENT_VERSION" != "$EXPECTED_VERSION" ]; then
                echo "❌ Version mismatch in $pkg: $CURRENT_VERSION != $EXPECTED_VERSION"
                MISMATCHES_FOUND=true

                # Detect indentation by checking the first property line
                INDENT_SPACES=$(head -10 "$pkg" | grep '^ *"' | head -1 | sed 's/".*//g' | wc -c)
                INDENT_SPACES=$((INDENT_SPACES - 1))

                jq --indent "$INDENT_SPACES" --arg version "$EXPECTED_VERSION" '.version = $version' "$pkg" > "$pkg.tmp" && mv "$pkg.tmp" "$pkg"
                echo "✓ Updated $pkg to version $EXPECTED_VERSION"
              else
                echo "✓ $pkg version matches: $CURRENT_VERSION"
              fi
            fi
          done

          if [ "$MISMATCHES_FOUND" = true ]; then
            echo ""
            echo "=========================================="
            echo "Version mismatches found!"
            echo "=========================================="
            echo ""

            BRANCH_OR_SHA="${{ inputs.target_commitish || github.ref }}"

            if git show-ref --verify --quiet "refs/heads/${BRANCH_OR_SHA}"; then
              echo "Creating commit with version updates and pushing to branch: ${BRANCH_OR_SHA}"

              git config user.name "github-actions[bot]"
              git config user.email "github-actions[bot]@users.noreply.github.com"

              BEFORE_SHA=$(git rev-parse HEAD)

              git add ${PACKAGE_JSONS[@]}
              git commit -m "chore: update package versions to ${{ inputs.version }}"
              git push origin "HEAD:${BRANCH_OR_SHA}"

              AFTER_SHA=$(git rev-parse HEAD)

              echo ""
              echo "=========================================="
              echo "WORKFLOW MUST BE RE-RUN"
              echo "=========================================="
              echo ""
              echo "✓ Version updates committed and pushed successfully"
              echo ""
              echo "Previous SHA: ${BEFORE_SHA}"
              echo "New SHA: ${AFTER_SHA}"
              echo ""
              echo "⚠️ CRITICAL: A new commit was created, but github.sha is immutable."
              echo "⚠️ github.sha = ${BEFORE_SHA} (original workflow trigger)"
              echo "⚠️ The release tag must point to ${AFTER_SHA} (with version updates)"
              echo ""
              echo "Re-run this workflow to create the release with the correct commit."
              echo ""
              exit 1
            else
              echo "Target is a commit SHA, not a branch. Cannot push version updates."
              echo "Please update the package.json versions manually and re-run the workflow."
              exit 1
            fi
          fi

          echo ""
          echo "✓ All package.json versions match the expected version: ${EXPECTED_VERSION}"

  build-artifacts:
    name: Build All Artifacts
    needs:
      - validate-version
    uses: ./.github/workflows/build-artifacts.yml
    with:
      ref: ${{ inputs.target_commitish || github.ref }}
      version_override: ${{ inputs.version }}
    secrets:
      VITE_ACCOUNT: ${{ secrets.VITE_ACCOUNT }}
      VITE_CONNECT: ${{ secrets.VITE_CONNECT }}
      VITE_UNRAID_NET: ${{ secrets.VITE_UNRAID_NET }}
      VITE_CALLBACK_KEY: ${{ secrets.VITE_CALLBACK_KEY }}
      UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

  generate-release-notes:
    name: Generate Release Notes
    needs:
      - build-artifacts
    uses: ./.github/workflows/generate-release-notes.yml
    with:
      version: ${{ inputs.version }}
      target_commitish: ${{ inputs.target_commitish || github.ref }}
      release_notes: ${{ inputs.release_notes }}
    secrets:
      UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

  create-release:
    name: Create GitHub Release (Draft)
    runs-on: ubuntu-latest
    needs:
      - generate-release-notes
    outputs:
      tag_name: ${{ steps.create_release.outputs.tag_name }}
      release_notes: ${{ needs.generate-release-notes.outputs.release_notes }}
    steps:
      - name: Checkout repo
        uses: actions/checkout@v6
        with:
          ref: ${{ inputs.target_commitish || github.ref }}
          fetch-depth: 0

      - name: Create or Update Release as Draft
        id: create_release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          TAG_NAME="v${{ inputs.version }}"
          TARGET="${{ inputs.target_commitish || github.sha }}"

          echo "tag_name=${TAG_NAME}" >> $GITHUB_OUTPUT

          if gh release view "${TAG_NAME}" > /dev/null 2>&1; then
            echo "Release ${TAG_NAME} already exists, updating as draft..."
            gh release edit "${TAG_NAME}" \
              --draft \
              --notes "${{ needs.generate-release-notes.outputs.release_notes }}" \
              ${{ inputs.prerelease && '--prerelease' || '' }}
          else
            echo "Creating new draft release ${TAG_NAME}..."
            git tag "${TAG_NAME}" "${TARGET}" || true
            git push origin "${TAG_NAME}" || true

            gh release create "${TAG_NAME}" \
              --draft \
              --title "${{ inputs.version }}" \
              --notes "${{ needs.generate-release-notes.outputs.release_notes }}" \
              --target "${TARGET}" \
              ${{ inputs.prerelease && '--prerelease' || '' }}
          fi

  build-plugin-production:
    name: Build and Deploy Production Plugin
    needs:
      - create-release
      - build-artifacts
    uses: ./.github/workflows/build-plugin.yml
    with:
      RELEASE_CREATED: 'true'
      RELEASE_TAG: ${{ needs.create-release.outputs.tag_name }}
      TAG: ""
      BUCKET_PATH: unraid-api
      BASE_URL: "https://stable.dl.unraid.net/unraid-api"
      BUILD_NUMBER: ${{ needs.build-artifacts.outputs.build_number }}
      ref: ${{ inputs.target_commitish || github.ref }}
    secrets:
      CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
      CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
      CF_BUCKET_PREVIEW: ${{ secrets.CF_BUCKET_PREVIEW }}
      CF_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
      UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

  publish-release:
    name: Publish Release
    runs-on: ubuntu-latest
    needs:
      - create-release
      - build-plugin-production
    steps:
      - name: Publish Release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          TAG_NAME="${{ needs.create-release.outputs.tag_name }}"
          echo "Publishing release ${TAG_NAME}..."
          gh release edit "${TAG_NAME}" --draft=false --repo ${{ github.repository }}
```
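The indentation detection in the version-sync step above is worth pulling out, since `jq` would otherwise rewrite every file to its default two-space style. A sketch of that one operation, with an illustrative target path and version:

```bash
#!/usr/bin/env bash
# Sketch of the indentation-preserving version bump used above.
set -euo pipefail

pkg="package.json"            # illustrative target
EXPECTED_VERSION="4.29.2"     # illustrative version

# Count the leading spaces on the first quoted property line: sed strips
# everything from the first quote onward, wc -c counts the spaces plus a
# trailing newline, hence the -1.
INDENT_SPACES=$(head -10 "$pkg" | grep '^ *"' | head -1 | sed 's/".*//g' | wc -c)
INDENT_SPACES=$((INDENT_SPACES - 1))

jq --indent "$INDENT_SPACES" --arg version "$EXPECTED_VERSION" \
  '.version = $version' "$pkg" > "$pkg.tmp" && mv "$pkg.tmp" "$pkg"
```

jq's `--indent` accepts widths up to 7 spaces, which covers the usual 2- and 4-space package.json styles.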
.github/workflows/publish-schema.yml (vendored, new file, 30 lines) @@ -0,0 +1,30 @@

```yaml
name: Publish GraphQL Schema

on:
  push:
    branches:
      - main
    paths:
      - 'api/generated-schema.graphql'
  workflow_dispatch:

jobs:
  publish-schema:
    name: Publish Schema to Apollo Studio
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v6

      - name: Install Apollo Rover CLI
        run: |
          curl -sSL https://rover.apollo.dev/nix/latest | sh
          echo "$HOME/.rover/bin" >> $GITHUB_PATH

      - name: Publish schema to Apollo Studio
        env:
          APOLLO_KEY: ${{ secrets.APOLLO_KEY }}
        run: |
          rover graph publish Unraid-API@current \
            --schema api/generated-schema.graphql
```
.github/workflows/release-production.yml (vendored, 2 changed lines)

```diff
@@ -28,7 +28,7 @@ jobs:
         with:
           latest: true
           prerelease: false
-      - uses: actions/setup-node@v5
+      - uses: actions/setup-node@v6
         with:
           node-version: 22.19.0
       - run: |
```
.gitignore (vendored, 6 changed lines)

```diff
@@ -122,4 +122,10 @@ api/dev/Unraid.net/myservers.cfg

 # local Mise settings
 .mise.toml
 mise.toml
+
+# Compiled test pages (generated from Nunjucks templates)
+web/public/test-pages/*.html
+
+# local scripts for testing and development
+.dev-scripts/
```
```diff
@@ -1 +1 @@
-{".":"4.25.0"}
+{".":"4.29.2"}
```
```diff
@@ -63,15 +63,6 @@
 */

 .unapi {
-  --color-alpha: #1c1b1b;
-  --color-beta: #f2f2f2;
-  --color-gamma: #999999;
-  --color-gamma-opaque: rgba(153, 153, 153, 0.5);
-  --color-customgradient-start: rgba(242, 242, 242, 0);
-  --color-customgradient-end: rgba(242, 242, 242, 0.85);
-  --shadow-beta: 0 25px 50px -12px rgba(242, 242, 242, 0.15);
-  --ring-offset-shadow: 0 0 var(--color-beta);
-  --ring-shadow: 0 0 var(--color-beta);
 }

 .unapi button:not(:disabled),
@@ -95,4 +86,4 @@ unraid-sso-button.unapi {
   --text-7xl: 4.5rem;
   --text-8xl: 6rem;
   --text-9xl: 8rem;
-}
+}
```
```diff
@@ -2,4 +2,4 @@
 @import './css-variables.css';
-@import './unraid-theme.css';
+@import './theme-variants.css';
 @import './base-utilities.css';
 @import './base-utilities.css';
```
```diff
@@ -5,88 +5,64 @@
 */

 /* Default/White Theme */
 :root,
-.theme-white {
-  --header-text-primary: #ffffff;
-  --header-text-secondary: #999999;
-  --header-background-color: #1c1b1b;
-  --header-gradient-start: rgba(28, 27, 27, 0);
-  --header-gradient-end: rgba(28, 27, 27, 0.7);
+.Theme--white {
+  --color-border: #383735;
   --color-alpha: #ff8c2f;
   --color-beta: #1c1b1b;
   --color-gamma: #ffffff;
   --color-gamma-opaque: rgba(255, 255, 255, 0.3);
+  --color-header-gradient-start: color-mix(in srgb, var(--header-background-color) 0%, transparent);
+  --color-header-gradient-end: color-mix(in srgb, var(--header-background-color) 100%, transparent);
   --shadow-beta: 0 25px 50px -12px color-mix(in srgb, var(--color-beta) 15%, transparent);
   --ring-offset-shadow: 0 0 var(--color-beta);
   --ring-shadow: 0 0 var(--color-beta);
 }

 /* Black Theme */
-.theme-black,
-.theme-black.dark {
-  --header-text-primary: #1c1b1b;
-  --header-text-secondary: #999999;
-  --header-background-color: #f2f2f2;
-  --header-gradient-start: rgba(242, 242, 242, 0);
-  --header-gradient-end: rgba(242, 242, 242, 0.7);
+.Theme--black,
+.Theme--black.dark {
+  --color-border: #e0e0e0;
   --color-alpha: #ff8c2f;
   --color-beta: #f2f2f2;
   --color-gamma: #1c1b1b;
   --color-gamma-opaque: rgba(28, 27, 27, 0.3);
+  --color-header-gradient-start: color-mix(in srgb, var(--header-background-color) 0%, transparent);
+  --color-header-gradient-end: color-mix(in srgb, var(--header-background-color) 100%, transparent);
   --shadow-beta: 0 25px 50px -12px color-mix(in srgb, var(--color-beta) 15%, transparent);
   --ring-offset-shadow: 0 0 var(--color-beta);
   --ring-shadow: 0 0 var(--color-beta);
 }

 /* Gray Theme */
-.theme-gray {
-  --header-text-primary: #ffffff;
-  --header-text-secondary: #999999;
-  --header-background-color: #1c1b1b;
-  --header-gradient-start: rgba(28, 27, 27, 0);
-  --header-gradient-end: rgba(28, 27, 27, 0.7);
+.Theme--gray,
+.Theme--gray.dark {
+  --color-border: #383735;
   --color-alpha: #ff8c2f;
   --color-beta: #383735;
   --color-gamma: #ffffff;
   --color-gamma-opaque: rgba(255, 255, 255, 0.3);
+  --color-header-gradient-start: color-mix(in srgb, var(--header-background-color) 0%, transparent);
+  --color-header-gradient-end: color-mix(in srgb, var(--header-background-color) 100%, transparent);
   --shadow-beta: 0 25px 50px -12px color-mix(in srgb, var(--color-beta) 15%, transparent);
   --ring-offset-shadow: 0 0 var(--color-beta);
   --ring-shadow: 0 0 var(--color-beta);
 }

 /* Azure Theme */
-.theme-azure {
-  --header-text-primary: #1c1b1b;
-  --header-text-secondary: #999999;
-  --header-background-color: #f2f2f2;
-  --header-gradient-start: rgba(242, 242, 242, 0);
-  --header-gradient-end: rgba(242, 242, 242, 0.7);
+.Theme--azure {
+  --color-border: #5a8bb8;
   --color-alpha: #ff8c2f;
   --color-beta: #e7f2f8;
   --color-gamma: #336699;
   --color-gamma-opaque: rgba(51, 102, 153, 0.3);
+  --color-header-gradient-start: color-mix(in srgb, var(--header-background-color) 0%, transparent);
+  --color-header-gradient-end: color-mix(in srgb, var(--header-background-color) 100%, transparent);
   --shadow-beta: 0 25px 50px -12px color-mix(in srgb, var(--color-beta) 15%, transparent);
   --ring-offset-shadow: 0 0 var(--color-beta);
   --ring-shadow: 0 0 var(--color-beta);
 }

+/* Dark Mode Overrides */
+.dark {
+  --color-border: #383735;
+}
+
 /*
  * Dynamic color variables for user overrides from GraphQL
  * These are set via JavaScript and override the theme defaults
  * Using :root with class for higher specificity to override theme classes
  */
 :root.has-custom-header-text {
-  --header-text-primary: var(--custom-header-text-primary);
+  --color-header-text-primary: var(--custom-header-text-primary);
 }

 :root.has-custom-header-meta {
-  --header-text-secondary: var(--custom-header-text-secondary);
+  --color-header-text-secondary: var(--custom-header-text-secondary);
 }

 :root.has-custom-header-bg {
-  --header-background-color: var(--custom-header-background-color);
-  --header-gradient-start: var(--custom-header-gradient-start);
-  --header-gradient-end: var(--custom-header-gradient-end);
+  --color-header-background: var(--custom-header-background-color);
+  --color-header-gradient-start: var(--custom-header-gradient-start);
+  --color-header-gradient-end: var(--custom-header-gradient-end);
 }
```
```diff
@@ -19,6 +19,7 @@ PATHS_LOGS_FILE=./dev/log/graphql-api.log
 PATHS_CONNECT_STATUS_FILE_PATH=./dev/connectStatus.json # Connect plugin status file
 PATHS_OIDC_JSON=./dev/configs/oidc.local.json
 PATHS_LOCAL_SESSION_FILE=./dev/local-session
+PATHS_DOCKER_TEMPLATES=./dev/docker-templates
 ENVIRONMENT="development"
 NODE_ENV="development"
 PORT="3001"
@@ -32,3 +33,4 @@ CHOKIDAR_USEPOLLING=true
 LOG_TRANSPORT=console
 LOG_LEVEL=trace
 ENABLE_NEXT_DOCKER_RELEASE=true
+SKIP_CONNECT_PLUGIN_CHECK=true
```
```diff
@@ -3,3 +3,4 @@ NODE_ENV="production"
 PORT="/var/run/unraid-api.sock"
 MOTHERSHIP_GRAPHQL_LINK="https://mothership.unraid.net/ws"
 PATHS_CONFIG_MODULES="/boot/config/plugins/dynamix.my.servers/configs"
+ENABLE_NEXT_DOCKER_RELEASE=true
```
```diff
@@ -3,3 +3,4 @@ NODE_ENV="production"
 PORT="/var/run/unraid-api.sock"
 MOTHERSHIP_GRAPHQL_LINK="https://staging.mothership.unraid.net/ws"
 PATHS_CONFIG_MODULES="/boot/config/plugins/dynamix.my.servers/configs"
+ENABLE_NEXT_DOCKER_RELEASE=true
```
```diff
@@ -8,7 +8,7 @@ export default tseslint.config(
   eslint.configs.recommended,
   ...tseslint.configs.recommended,
   {
-    ignores: ['src/graphql/generated/client/**/*', 'src/**/**/dummy-process.js'],
+    ignores: ['src/graphql/generated/client/**/*', 'src/**/**/dummy-process.js', 'dist/**/*'],
   },
   {
     plugins: {
@@ -42,7 +42,10 @@ export default tseslint.config(
       'ignorePackages',
       {
         js: 'always',
-        ts: 'always',
+        mjs: 'always',
+        cjs: 'always',
+        ts: 'never',
+        tsx: 'never',
       },
     ],
     'no-restricted-globals': [
```
api/.gitignore (vendored, 6 changed lines)

```diff
@@ -83,6 +83,8 @@ deploy/*

 !**/*.login.*

+# Local Development Artifacts
+
 # local api configs - don't need project-wide tracking
 dev/connectStatus.json
 dev/configs/*
@@ -96,3 +98,7 @@ dev/configs/oidc.local.json

 # local api keys
 dev/keys/*
+# mock docker templates
+dev/docker-templates
+# ie unraid notifications
+dev/notifications
```

```diff
@@ -5,3 +5,4 @@ src/unraid-api/unraid-file-modifier/modifications/__fixtures__/downloaded/*

 # Generated Types
 src/graphql/generated/client/*.ts
+dist/
```
api/CHANGELOG.md (144 changed lines) @@ -1,5 +1,149 @@

# Changelog

## [4.29.2](https://github.com/unraid/api/compare/v4.29.1...v4.29.2) (2025-12-19)

### Bug Fixes

* unraid-connect plugin not loaded when connect is installed ([#1856](https://github.com/unraid/api/issues/1856)) ([73135b8](https://github.com/unraid/api/commit/73135b832801f5c76d60020161492e4770958c3d))

## [4.29.1](https://github.com/unraid/api/compare/v4.29.0...v4.29.1) (2025-12-19)

### Bug Fixes

* revert replace docker overview table with web component (7.3+) ([#1853](https://github.com/unraid/api/issues/1853)) ([560db88](https://github.com/unraid/api/commit/560db880cc138324f9ff8753f7209b683a84c045))

## [4.29.0](https://github.com/unraid/api/compare/v4.28.2...v4.29.0) (2025-12-19)

### Features

* replace docker overview table with web component (7.3+) ([#1764](https://github.com/unraid/api/issues/1764)) ([277ac42](https://github.com/unraid/api/commit/277ac420464379e7ee6739c4530271caf7717503))

### Bug Fixes

* handle race condition between guid loading and license check ([#1847](https://github.com/unraid/api/issues/1847)) ([8b155d1](https://github.com/unraid/api/commit/8b155d1f1c99bb19efbc9614e000d852e9f0c12d))
* resolve issue with "Continue" button when updating ([#1852](https://github.com/unraid/api/issues/1852)) ([d099e75](https://github.com/unraid/api/commit/d099e7521d2062bb9cf84f340e46b169dd2492c5))
* update myservers config references to connect config references ([#1810](https://github.com/unraid/api/issues/1810)) ([e1e3ea7](https://github.com/unraid/api/commit/e1e3ea7eb68cc6840f67a8aec937fd3740e75b28))

## [4.28.2](https://github.com/unraid/api/compare/v4.28.1...v4.28.2) (2025-12-16)

### Bug Fixes

* **api:** timeout on startup on 7.0 and 6.12 ([#1844](https://github.com/unraid/api/issues/1844)) ([e243ae8](https://github.com/unraid/api/commit/e243ae836ec1a7fde37dceeb106cc693b20ec82b))

## [4.28.1](https://github.com/unraid/api/compare/v4.28.0...v4.28.1) (2025-12-16)

### Bug Fixes

* empty commit to release as 4.28.1 ([df78608](https://github.com/unraid/api/commit/df786084572eefb82e086c15939b50cc08b9db10))

## [4.28.0](https://github.com/unraid/api/compare/v4.27.2...v4.28.0) (2025-12-15)

### Features

* when cancelling OS upgrade, delete any plugin files that were d… ([#1823](https://github.com/unraid/api/issues/1823)) ([74df938](https://github.com/unraid/api/commit/74df938e450def2ee3e2864d4b928f53a68e9eb8))

### Bug Fixes

* change keyfile watcher to poll instead of inotify on FAT32 ([#1820](https://github.com/unraid/api/issues/1820)) ([23a7120](https://github.com/unraid/api/commit/23a71207ddde221867562b722f4e65a5fc4dd744))
* enhance dark mode support in theme handling ([#1808](https://github.com/unraid/api/issues/1808)) ([d6e2939](https://github.com/unraid/api/commit/d6e29395c8a8b0215d4f5945775de7fa358d06ec))
* improve API startup reliability with timeout budget tracking ([#1824](https://github.com/unraid/api/issues/1824)) ([51f025b](https://github.com/unraid/api/commit/51f025b105487b178048afaabf46b260c4a7f9c1))
* PHP Warnings in Management Settings ([#1805](https://github.com/unraid/api/issues/1805)) ([832e9d0](https://github.com/unraid/api/commit/832e9d04f207d3ec612c98500a2ffc86659264e5))
* **plg:** explicitly stop an existing api before installation ([#1841](https://github.com/unraid/api/issues/1841)) ([99ce88b](https://github.com/unraid/api/commit/99ce88bfdc0a7f020c42f2fe0c6a0f4e32ac8f5a))
* update @unraid/shared-callbacks to version 3.0.0 ([#1831](https://github.com/unraid/api/issues/1831)) ([73b2ce3](https://github.com/unraid/api/commit/73b2ce360c66cd9bedc138a5f8306af04b6bde77))
* **ups:** convert estimatedRuntime from minutes to seconds ([#1822](https://github.com/unraid/api/issues/1822)) ([024ae69](https://github.com/unraid/api/commit/024ae69343bad5a3cbc19f80e357082e9b2efc1e))

## [4.27.2](https://github.com/unraid/api/compare/v4.27.1...v4.27.2) (2025-11-21)

### Bug Fixes

* issue with header flashing + issue with trial date ([64875ed](https://github.com/unraid/api/commit/64875edbba786a0d1ba0113c9e9a3d38594eafcc))

## [4.27.1](https://github.com/unraid/api/compare/v4.27.0...v4.27.1) (2025-11-21)

### Bug Fixes

* missing translations for expiring trials ([#1800](https://github.com/unraid/api/issues/1800)) ([36c1049](https://github.com/unraid/api/commit/36c104915ece203a3cac9e1a13e0c325e536a839))
* resolve header flash when background color is set ([#1796](https://github.com/unraid/api/issues/1796)) ([dc9a036](https://github.com/unraid/api/commit/dc9a036c73d8ba110029364e0d044dc24c7d0dfa))

## [4.27.0](https://github.com/unraid/api/compare/v4.26.2...v4.27.0) (2025-11-19)

### Features

* remove Unraid API log download functionality ([#1793](https://github.com/unraid/api/issues/1793)) ([e4a9b82](https://github.com/unraid/api/commit/e4a9b8291b049752a9ff59b17ff50cf464fe0535))

### Bug Fixes

* auto-uninstallation of connect api plugin ([#1791](https://github.com/unraid/api/issues/1791)) ([e734043](https://github.com/unraid/api/commit/e7340431a58821ec1b4f5d1b452fba6613b01fa5))

## [4.26.2](https://github.com/unraid/api/compare/v4.26.1...v4.26.2) (2025-11-19)

### Bug Fixes

* **theme:** Missing header background color ([e2fdf6c](https://github.com/unraid/api/commit/e2fdf6cadbd816559b8c82546c2bc771a81ffa9e))

## [4.26.1](https://github.com/unraid/api/compare/v4.26.0...v4.26.1) (2025-11-18)

### Bug Fixes

* **theme:** update theme class naming and scoping logic ([b28ef1e](https://github.com/unraid/api/commit/b28ef1ea334cb4842f01fa992effa7024185c6c9))

## [4.26.0](https://github.com/unraid/api/compare/v4.25.3...v4.26.0) (2025-11-17)

### Features

* add cpu power query & subscription ([#1745](https://github.com/unraid/api/issues/1745)) ([d7aca81](https://github.com/unraid/api/commit/d7aca81c60281bfa47fb9113929c1ead6ed3361b))
* add schema publishing to apollo studio ([#1772](https://github.com/unraid/api/issues/1772)) ([7e13202](https://github.com/unraid/api/commit/7e13202aa1c02803095bb72bb1bcb2472716f53a))
* add workflow_dispatch trigger to schema publishing workflow ([818e7ce](https://github.com/unraid/api/commit/818e7ce997059663e07efcf1dab706bf0d7fc9da))
* apollo studio readme link ([c4cd0c6](https://github.com/unraid/api/commit/c4cd0c63520deec15d735255f38811f0360fe3a1))
* **cli:** make `unraid-api plugins remove` scriptable ([#1774](https://github.com/unraid/api/issues/1774)) ([64eb9ce](https://github.com/unraid/api/commit/64eb9ce9b5d1ff4fb1f08d9963522c5d32221ba7))
|
||||
* use persisted theme css to fix flashes on header ([#1784](https://github.com/unraid/api/issues/1784)) ([854b403](https://github.com/unraid/api/commit/854b403fbd85220a3012af58ce033cf0b8418516))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **api:** decode html entities before parsing notifications ([#1768](https://github.com/unraid/api/issues/1768)) ([42406e7](https://github.com/unraid/api/commit/42406e795da1e5b95622951a467722dde72d51a8))
|
||||
* **connect:** disable api plugin if unraid plugin is absent ([#1773](https://github.com/unraid/api/issues/1773)) ([c264a18](https://github.com/unraid/api/commit/c264a1843cf115e8cc1add1ab4f12fdcc932405a))
|
||||
* detection of flash backup activation state ([#1769](https://github.com/unraid/api/issues/1769)) ([d18eaf2](https://github.com/unraid/api/commit/d18eaf2364e0c04992c52af38679ff0a0c570440))
|
||||
* re-add missing header gradient styles ([#1787](https://github.com/unraid/api/issues/1787)) ([f8a6785](https://github.com/unraid/api/commit/f8a6785e9c92f81acaef76ac5eb78a4a769e69da))
|
||||
* respect OS safe mode in plugin loader ([#1775](https://github.com/unraid/api/issues/1775)) ([92af3b6](https://github.com/unraid/api/commit/92af3b61156cabae70368cf5222a2f7ac5b4d083))
|
||||
|
||||
## [4.25.3](https://github.com/unraid/unraid-api/compare/v4.25.2...v4.25.3) (2025-10-22)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* flaky watch on boot drive's dynamix config ([ec7aa06](https://github.com/unraid/unraid-api/commit/ec7aa06d4a5fb1f0e84420266b0b0d7ee09a3663))
|
||||
|
||||
## [4.25.2](https://github.com/unraid/api/compare/v4.25.1...v4.25.2) (2025-09-30)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* enhance activation code modal visibility logic ([#1733](https://github.com/unraid/api/issues/1733)) ([e57ec00](https://github.com/unraid/api/commit/e57ec00627e54ce76d903fd0fa8686ad02b393f3))
|
||||
|
||||
## [4.25.1](https://github.com/unraid/api/compare/v4.25.0...v4.25.1) (2025-09-30)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add cache busting to web component extractor ([#1731](https://github.com/unraid/api/issues/1731)) ([0d165a6](https://github.com/unraid/api/commit/0d165a608740505bdc505dcf69fb615225969741))
|
||||
* Connect won't appear within Apps - Previous Apps ([#1727](https://github.com/unraid/api/issues/1727)) ([d73953f](https://github.com/unraid/api/commit/d73953f8ff3d7425c0aed32d16236ededfd948e1))
|
||||
|
||||
## [4.25.0](https://github.com/unraid/api/compare/v4.24.1...v4.25.0) (2025-09-26)
|
||||
|
||||
|
||||
|
||||
@@ -71,6 +71,20 @@ unraid-api report -vv

If you found this file, you're likely a developer. If you'd like to know more about the API and when it's available, please join [our discord](https://discord.unraid.net/).

## Internationalization

- Run `pnpm --filter @unraid/api i18n:extract` to scan the Nest.js source for translation helper usages and update `src/i18n/en.json` with any new keys. The extractor keeps existing translations intact and appends new keys with their English source text.

## Developer Documentation

For detailed information about specific features:

- [API Plugins](docs/developer/api-plugins.md) - Working with API plugins and workspace packages
- [Docker Feature](docs/developer/docker.md) - Container management, GraphQL API, and WebGUI integration
- [Feature Flags](docs/developer/feature-flags.md) - Conditionally enabling functionality
- [Repository Organization](docs/developer/repo-organization.md) - Codebase structure
- [Development Workflows](docs/developer/workflows.md) - Development processes

## License

Copyright Lime Technology Inc. All rights reserved.
@@ -1,9 +1,7 @@
{
"version": "4.22.2",
"version": "4.29.2",
"extraOrigins": [],
"sandbox": true,
"ssoSubIds": [],
"plugins": [
"unraid-api-plugin-connect"
]
}
"plugins": ["unraid-api-plugin-connect"]
}
@@ -0,0 +1,6 @@
timestamp=1730937600
event=Hashtag Test
subject=Warning [UNRAID] - #1 OS is cooking
description=Disk 1 temperature has reached #epic # levels of proportion
importance=warning

@@ -0,0 +1,6 @@
timestamp=1730937600
event=Temperature Test
subject=Warning [UNRAID] - High disk temperature detected: 45 °C
description=Disk 1 temperature has reached 45 °C (threshold: 40 °C)<br><br>Current temperatures:<br>Parity - 32 °C [OK]<br>Disk 1 - 45 °C [WARNING]<br>Disk 2 - 38 °C [OK]<br>Cache - 28 °C [OK]<br><br>Please check cooling system.
importance=warning
555
api/docs/developer/docker.md
Normal file
@@ -0,0 +1,555 @@
# Docker Feature

The Docker feature provides complete container management for Unraid through a GraphQL API, including lifecycle operations, real-time monitoring, update detection, and organizational tools.

## Table of Contents

- [Overview](#overview)
- [Architecture](#architecture)
  - [Module Structure](#module-structure)
  - [Data Flow](#data-flow)
- [Core Services](#core-services)
  - [DockerService](#dockerservice)
  - [DockerNetworkService](#dockernetworkservice)
  - [DockerPortService](#dockerportservice)
  - [DockerLogService](#dockerlogservice)
  - [DockerStatsService](#dockerstatsservice)
  - [DockerAutostartService](#dockerautostartservice)
  - [DockerConfigService](#dockerconfigservice)
  - [DockerManifestService](#dockermanifestservice)
  - [DockerPhpService](#dockerphpservice)
  - [DockerTailscaleService](#dockertailscaleservice)
  - [DockerTemplateScannerService](#dockertemplatescannerservice)
  - [DockerOrganizerService](#dockerorganizerservice)
- [GraphQL API](#graphql-api)
  - [Queries](#queries)
  - [Mutations](#mutations)
  - [Subscriptions](#subscriptions)
- [Data Models](#data-models)
  - [DockerContainer](#dockercontainer)
  - [ContainerState](#containerstate)
  - [ContainerPort](#containerport)
  - [DockerPortConflicts](#dockerportconflicts)
- [Caching Strategy](#caching-strategy)
- [WebGUI Integration](#webgui-integration)
  - [File Modification](#file-modification)
  - [PHP Integration](#php-integration)
- [Permissions](#permissions)
- [Configuration Files](#configuration-files)
- [Development](#development)
  - [Adding a New Docker Service](#adding-a-new-docker-service)
  - [Testing](#testing)
  - [Feature Flag Testing](#feature-flag-testing)
## Overview

**Location:** `src/unraid-api/graph/resolvers/docker/`

**Feature Flag:** Many next-generation features are gated behind `ENABLE_NEXT_DOCKER_RELEASE`. See [Feature Flags](./feature-flags.md) for details on enabling.

**Key Capabilities:**

- Container lifecycle management (start, stop, pause, update, remove)
- Real-time container stats streaming
- Network and port conflict detection
- Container log retrieval
- Automatic update detection via digest comparison
- Tailscale container integration
- Container organization with folders and views
- Template-based metadata resolution

## Architecture

### Module Structure

The Docker module (`docker.module.ts`) serves as the entry point and exports:

- **13 services** for various Docker operations
- **3 resolvers** for GraphQL query/mutation/subscription handling

**Dependencies:**

- `JobModule` - Background job scheduling
- `NotificationsModule` - User notifications
- `ServicesModule` - Shared service utilities

### Data Flow

```text
Docker Daemon (Unix Socket)
↓
dockerode library
↓
DockerService (transform & cache)
↓
GraphQL Resolvers
↓
Client Applications
```

The API communicates with the Docker daemon through the `dockerode` library via Unix socket. Container data is transformed from raw Docker API format to GraphQL types, enriched with Unraid-specific metadata (templates, autostart config), and cached for performance.
## Core Services

### DockerService

**File:** `docker.service.ts`

Central orchestrator for all container operations.

**Key Methods:**

- `getContainers(skipCache?, includeSize?)` - List containers with caching
- `start(id)`, `stop(id)`, `pause(id)`, `unpause(id)` - Lifecycle operations
- `updateContainer(id)`, `updateContainers(ids)`, `updateAllContainers()` - Image updates
- `removeContainer(id, withImage?)` - Remove container and optionally its image

**Caching:**

- Cache TTL: 60 seconds (60000ms)
- Cache keys: `docker_containers`, `docker_containers_with_size`
- Invalidated automatically on mutations
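As a rough illustration of this pattern (a minimal sketch, not the actual implementation; the real service wires these dependencies through NestJS DI), a read-through cache around `dockerode` might look like:

```typescript
// Illustrative sketch only; identifiers and wiring are simplified.
import Docker from 'dockerode';
import { createCache } from 'cache-manager';

const docker = new Docker({ socketPath: '/var/run/docker.sock' });
const cache = createCache(); // defaults to an in-memory store

const CACHE_KEY = 'docker_containers';
const CACHE_TTL_MS = 60_000; // cache-manager v7 TTLs are milliseconds, not seconds

async function getContainers(skipCache = false): Promise<Docker.ContainerInfo[]> {
    if (!skipCache) {
        const cached = await cache.get<Docker.ContainerInfo[]>(CACHE_KEY);
        if (cached) return cached;
    }
    const containers = await docker.listContainers({ all: true });
    await cache.set(CACHE_KEY, containers, CACHE_TTL_MS);
    return containers;
}
```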
### DockerNetworkService

**File:** `docker-network.service.ts`

Lists Docker networks with metadata including driver, scope, IPAM settings, and connected containers.

**Caching:** 60 seconds

### DockerPortService

**File:** `docker-port.service.ts`

Detects port conflicts between containers and with the host.

**Features:**

- Deduplicates port mappings from Docker API
- Identifies container-to-container conflicts
- Detects host-level port collisions
- Separates TCP and UDP conflicts
- Calculates LAN-accessible IP:port combinations
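To make the conflict rule concrete, here is a minimal sketch (illustrative types, not the service's real model) of grouping published host ports to find container-to-container conflicts:

```typescript
// Only published host ports can collide; TCP and UDP are tracked separately.
type PortBinding = { containerName: string; publicPort?: number; type: 'tcp' | 'udp' };

function findPortConflicts(bindings: PortBinding[]): Array<[string, string[]]> {
    const byHostPort = new Map<string, string[]>();
    for (const b of bindings) {
        if (b.publicPort == null) continue;
        const key = `${b.publicPort}/${b.type}`;
        const owners = byHostPort.get(key) ?? [];
        owners.push(b.containerName);
        byHostPort.set(key, owners);
    }
    // A conflict is any host port claimed by more than one distinct container.
    return [...byHostPort.entries()].filter(([, owners]) => new Set(owners).size > 1);
}
```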
### DockerLogService

**File:** `docker-log.service.ts`

Retrieves container logs with configurable options.

**Parameters:**

- `tail` - Number of lines (default: 200, max: 2000)
- `since` - Timestamp filter for log entries

**Additional Features:**

- Calculates container log file sizes
- Supports timestamp-based filtering
### DockerStatsService

**File:** `docker-stats.service.ts`

Provides real-time container statistics via GraphQL subscription.

**Metrics:**

- CPU percentage
- Memory usage and limit
- Network I/O (received/transmitted bytes)
- Block I/O (read/written bytes)

**Implementation:**

- Spawns `docker stats` process with streaming output
- Publishes to `PUBSUB_CHANNEL.DOCKER_STATS`
- Auto-starts on first subscriber, stops when last disconnects
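A simplified sketch of this streaming loop (process handling and pubsub wiring are assumptions, not the exact implementation):

```typescript
import { spawn } from 'node:child_process';

function streamDockerStats(publish: (stats: unknown) => void): () => void {
    // `docker stats` with a JSON format string emits one JSON object per container per tick.
    const proc = spawn('docker', ['stats', '--format', '{{json .}}']);
    proc.stdout.on('data', (chunk: Buffer) => {
        for (const line of chunk.toString().split('\n')) {
            if (!line.trim()) continue;
            try {
                publish(JSON.parse(line)); // e.g. pubsub.publish(PUBSUB_CHANNEL.DOCKER_STATS, ...)
            } catch {
                // partial line split across chunks; a real implementation would buffer
            }
        }
    });
    return () => proc.kill(); // called when the last subscriber disconnects
}
```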
### DockerAutostartService

**File:** `docker-autostart.service.ts`

Manages container auto-start configuration.

**Features:**

- Parses auto-start file format (name + wait time per line)
- Maintains auto-start order and wait times
- Persists configuration changes
- Tracks container primary names
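A minimal parser for that line format might look like this (the `name wait` layout is taken from the description above; the wait time is assumed optional, defaulting to 0):

```typescript
interface AutostartEntry {
    name: string;
    wait: number;  // seconds to wait after starting this container
    order: number; // zero-based position in the file
}

function parseAutostartFile(contents: string): AutostartEntry[] {
    return contents
        .split('\n')
        .map((line) => line.trim())
        .filter(Boolean)
        .map((line, order) => {
            const [name, wait] = line.split(/\s+/);
            return { name, wait: Number(wait ?? 0) || 0, order };
        });
}
```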
### DockerConfigService

**File:** `docker-config.service.ts`

Persistent configuration management using `ConfigFilePersister`.

**Configuration Options:**

- `templateMappings` - Container name to template file path mappings
- `skipTemplatePaths` - Containers excluded from template scanning
- `updateCheckCronSchedule` - Cron expression for digest refresh (default: daily at 6am)
### DockerManifestService

**File:** `docker-manifest.service.ts`

Detects available container image updates.

**Implementation:**

- Compares local and remote image SHA256 digests
- Reads cached status from `/var/lib/docker/unraid-update-status.json`
- Triggers refresh via PHP integration
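The digest comparison itself reduces to a string equality check; a hedged sketch (field names are assumptions, not the file's actual schema):

```typescript
interface UpdateStatusEntry {
    localDigest?: string;  // sha256 of the image currently on disk
    remoteDigest?: string; // sha256 advertised by the registry
}

// undefined maps to the UNKNOWN status: one of the digests could not be determined.
function isUpdateAvailable(entry: UpdateStatusEntry): boolean | undefined {
    if (!entry.localDigest || !entry.remoteDigest) return undefined;
    return entry.localDigest !== entry.remoteDigest;
}
```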
### DockerPhpService

**File:** `docker-php.service.ts`

Integration with legacy Unraid PHP Docker scripts.

**PHP Scripts Used:**

- `DockerUpdate.php` - Refresh container digests
- `DockerContainers.php` - Get update statuses

**Update Statuses:**

- `UP_TO_DATE` - Container is current
- `UPDATE_AVAILABLE` - New image available
- `REBUILD_READY` - Rebuild required
- `UNKNOWN` - Status could not be determined
### DockerTailscaleService

**File:** `docker-tailscale.service.ts`

Detects and monitors Tailscale-enabled containers.

**Detection Methods:**

- Container labels indicating Tailscale
- Tailscale socket mount points

**Status Information:**

- Tailscale version and backend state
- Hostname and DNS name
- Exit node status
- Key expiry dates

**Caching:**

- Status cache: 30 seconds
- DERP map and versions: 24 hours
### DockerTemplateScannerService

**File:** `docker-template-scanner.service.ts`

Maps containers to their template files for metadata resolution.

**Bootstrap Process:**

1. Runs 5 seconds after app startup
2. Scans XML templates from configured paths
3. Parses container/image names from XML
4. Matches against running containers
5. Stores mappings in `docker.config.json`

**Template Metadata Resolved:**

- `projectUrl`, `registryUrl`, `supportUrl`
- `iconUrl`, `webUiUrl`, `shell`
- Template port mappings

**Orphaned Containers:**

Containers without matching templates are marked as "orphaned" in the API response.
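A simplified version of the matching pass, assuming templates expose a `<Name>` element (per step 3 the real scanner also considers image names; this sketch matches on container name only):

```typescript
import { XMLParser } from 'fast-xml-parser'; // already a dependency of the API package

interface TemplateMatch {
    containerName: string;
    templatePath?: string; // undefined => orphaned
}

function matchContainers(
    containers: { name: string }[],
    templates: { path: string; xml: string }[]
): TemplateMatch[] {
    const parser = new XMLParser();
    const byName = new Map<string, string>();
    for (const t of templates) {
        const name = parser.parse(t.xml)?.Container?.Name;
        if (name) byName.set(String(name).toLowerCase(), t.path);
    }
    return containers.map((c) => ({
        containerName: c.name,
        templatePath: byName.get(c.name.toLowerCase()),
    }));
}
```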
### DockerOrganizerService

**File:** `organizer/docker-organizer.service.ts`

Container organization system for UI views.

**Features:**

- Hierarchical folder structure
- Multiple views with different layouts
- Position-based organization
- View-specific preferences (sorting, filtering)
## GraphQL API

### Queries

```graphql
type Query {
  docker: Docker!
}

type Docker {
  containers(skipCache: Boolean): [DockerContainer!]!
  container(id: PrefixedID!): DockerContainer # Feature-flagged
  networks(skipCache: Boolean): [DockerNetwork!]!
  portConflicts(skipCache: Boolean): DockerPortConflicts!
  logs(id: PrefixedID!, since: Int, tail: Int): DockerContainerLogs!
  organizer(skipCache: Boolean): DockerOrganizer! # Feature-flagged
  containerUpdateStatuses: [ContainerUpdateStatus!]! # Feature-flagged
}
```
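For example, a client could fetch container state like this (the host URL and `x-api-key` header are placeholders for illustration, not guaranteed API details):

```typescript
// Hypothetical client call; adjust host and authentication for your setup.
const query = /* GraphQL */ `
    query DockerContainers {
        docker {
            containers {
                names
                state
                isUpdateAvailable
            }
        }
    }
`;

const response = await fetch('http://your-server/graphql', {
    method: 'POST',
    headers: { 'content-type': 'application/json', 'x-api-key': '<your-api-key>' },
    body: JSON.stringify({ query }),
});
console.log(await response.json());
```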
### Mutations

**Container Lifecycle:**

```graphql
type Mutation {
  start(id: PrefixedID!): DockerContainer!
  stop(id: PrefixedID!): DockerContainer!
  pause(id: PrefixedID!): DockerContainer!
  unpause(id: PrefixedID!): DockerContainer!
  removeContainer(id: PrefixedID!, withImage: Boolean): Boolean!
}
```

**Container Updates:**

```graphql
type Mutation {
  updateContainer(id: PrefixedID!): DockerContainer!
  updateContainers(ids: [PrefixedID!]!): [DockerContainer!]!
  updateAllContainers: [DockerContainer!]!
  refreshDockerDigests: Boolean!
}
```

**Configuration:**

```graphql
type Mutation {
  updateAutostartConfiguration(
    entries: [AutostartEntry!]!
    persistUserPreferences: Boolean
  ): Boolean!
  syncDockerTemplatePaths: Boolean!
  resetDockerTemplateMappings: Boolean!
}
```

**Organizer (Feature-flagged):**

```graphql
type Mutation {
  createDockerFolder(name: String!, parentId: ID, childrenIds: [ID!]): DockerFolder!
  createDockerFolderWithItems(
    name: String!
    parentId: ID
    sourceEntryIds: [ID!]
    position: Int
  ): DockerFolder!
  setDockerFolderChildren(folderId: ID!, childrenIds: [ID!]!): DockerFolder!
  deleteDockerEntries(entryIds: [ID!]!): Boolean!
  moveDockerEntriesToFolder(sourceEntryIds: [ID!]!, destinationFolderId: ID!): Boolean!
  moveDockerItemsToPosition(
    sourceEntryIds: [ID!]!
    destinationFolderId: ID!
    position: Int!
  ): Boolean!
  renameDockerFolder(folderId: ID!, newName: String!): DockerFolder!
  updateDockerViewPreferences(viewId: ID!, prefs: ViewPreferencesInput!): Boolean!
}
```
### Subscriptions

```graphql
type Subscription {
  dockerContainerStats: DockerContainerStats!
}
```

Real-time container statistics stream. Automatically starts when the first client subscribes and stops when the last client disconnects.
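With `graphql-ws` (already a dependency of the API package), a subscriber could look like this sketch (URL and authentication are assumptions):

```typescript
// Illustrative subscriber; on Node < 22 pass a `webSocketImpl` (e.g. from the `ws` package).
import { createClient } from 'graphql-ws';

const client = createClient({ url: 'ws://your-server/graphql' });

const dispose = client.subscribe(
    { query: 'subscription { dockerContainerStats { id cpuPercent memUsage netIO blockIO } }' },
    {
        next: ({ data }) => console.log(data),
        error: (err) => console.error(err),
        complete: () => console.log('stats stream closed'),
    }
);
// Calling dispose() unsubscribes; once the last subscriber leaves, the server stops `docker stats`.
```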
## Data Models

### DockerContainer

Primary container representation with 24+ fields:

```typescript
{
  id: PrefixedID
  names: [String!]!
  image: String!
  imageId: String!
  state: ContainerState!
  status: String!
  created: Float!

  // Networking
  ports: [ContainerPort!]!
  lanIpPorts: [ContainerPort!]!
  hostConfig: ContainerHostConfig
  networkSettings: DockerNetworkSettings

  // Storage
  sizeRootFs: Float
  sizeRw: Float
  sizeLog: Float
  mounts: [ContainerMount!]!

  // Metadata
  labels: JSON

  // Auto-start
  autoStart: Boolean!
  autoStartOrder: Int
  autoStartWait: Int

  // Template Integration
  templatePath: String
  isOrphaned: Boolean!
  projectUrl: String
  registryUrl: String
  supportUrl: String
  iconUrl: String
  webUiUrl: String
  shell: String
  templatePorts: [ContainerPort!]

  // Tailscale
  tailscaleEnabled: Boolean!
  tailscaleStatus: TailscaleStatus

  // Updates
  isUpdateAvailable: Boolean
  isRebuildReady: Boolean
}
```

### ContainerState

```typescript
enum ContainerState {
  RUNNING
  PAUSED
  EXITED
}
```

### ContainerPort

```typescript
{
  ip: String
  privatePort: Int!
  publicPort: Int
  type: String! // "tcp" or "udp"
}
```

### DockerPortConflicts

```typescript
{
  containerConflicts: [DockerContainerPortConflict!]!
  lanConflicts: [DockerLanPortConflict!]!
}
```
## Caching Strategy

The Docker feature uses `cache-manager` v7 for performance optimization.

**Important:** cache-manager v7 expects TTL values in **milliseconds**, not seconds.

| Cache Key | TTL | Invalidation |
|-----------|-----|--------------|
| `docker_containers` | 60s | On any container mutation |
| `docker_containers_with_size` | 60s | On any container mutation |
| `docker_networks` | 60s | On network changes |
| Tailscale status | 30s | Automatic |
| Tailscale DERP/versions | 24h | Automatic |

**Cache Invalidation Triggers:**

- `start()`, `stop()`, `pause()`, `unpause()`
- `updateContainer()`, `updateContainers()`, `updateAllContainers()`
- `removeContainer()`
- `updateAutostartConfiguration()`
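In code, invalidation is just deleting the affected keys after a successful mutation; a minimal sketch (method names follow cache-manager v7, the rest is illustrative):

```typescript
import type { Cache } from 'cache-manager';

// Called after any container mutation; both keys must go, since either copy may be stale.
async function invalidateContainerCaches(cache: Cache): Promise<void> {
    await Promise.all([
        cache.del('docker_containers'),
        cache.del('docker_containers_with_size'),
    ]);
}
```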
## WebGUI Integration

### File Modification

**File:** `unraid-file-modifier/modifications/docker-containers-page.modification.ts`

**Target:** `/usr/local/emhttp/plugins/dynamix.docker.manager/DockerContainers.page`

When `ENABLE_NEXT_DOCKER_RELEASE` is enabled and the Unraid version is 7.3.0+, the modification:

1. Replaces the legacy Docker containers page
2. Injects the Vue web component: `<unraid-docker-container-overview>`
3. Retains the `Nchan="docker_load"` page attribute (an emhttp/WebGUI feature for real-time updates, not controlled by the API)

### PHP Integration

The API integrates with legacy Unraid PHP scripts for certain operations:

- **Digest refresh:** Calls `DockerUpdate.php` to refresh container image digests
- **Update status:** Reads from `DockerContainers.php` output
## Permissions

All Docker operations are protected with permission checks:

| Operation | Resource | Action |
|-----------|----------|--------|
| Read containers/networks | `Resource.DOCKER` | `AuthAction.READ_ANY` |
| Start/stop/pause/update | `Resource.DOCKER` | `AuthAction.UPDATE_ANY` |
| Remove containers | `Resource.DOCKER` | `AuthAction.DELETE_ANY` |
## Configuration Files

| File | Purpose |
|------|---------|
| `docker.config.json` | Template mappings, skip paths, cron schedule |
| `docker.organizer.json` | Container organization tree and views |
| `/var/lib/docker/unraid-update-status.json` | Cached container update statuses |
## Development

### Adding a New Docker Service

1. Create a service file in `src/unraid-api/graph/resolvers/docker/` (see the skeleton sketch after this list)
2. Add it to the `docker.module.ts` providers and exports
3. Inject it into resolvers as needed
4. Add GraphQL types to `docker.model.ts` if needed
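A skeleton for steps 1-3 might look like this (hypothetical service name; the module wiring is shown as comments):

```typescript
import { Injectable, Logger } from '@nestjs/common';

@Injectable()
export class DockerExampleService {
    private readonly logger = new Logger(DockerExampleService.name);

    async doSomething(): Promise<void> {
        this.logger.debug('called from a resolver');
    }
}

// Then register it in docker.module.ts (step 2):
//   providers: [...existingProviders, DockerExampleService],
//   exports: [...existingExports, DockerExampleService],
```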
### Testing

```bash
# Run Docker-related tests
pnpm --filter ./api test -- src/unraid-api/graph/resolvers/docker/

# Run specific test file
pnpm --filter ./api test -- src/unraid-api/graph/resolvers/docker/docker.service.spec.ts
```

### Feature Flag Testing

To test next-generation Docker features locally:

```bash
ENABLE_NEXT_DOCKER_RELEASE=true unraid-api start
```

Or add to `.env`:

```env
ENABLE_NEXT_DOCKER_RELEASE=true
```
@@ -62,15 +62,18 @@ To build all packages in the monorepo:

pnpm build
```

### Watch Mode Building
### Plugin Building (Docker Required)

For continuous building during development:
The plugin build requires Docker. This command automatically builds all dependencies (API, web) before starting Docker:

```bash
pnpm build:watch
cd plugin
pnpm run docker:build-and-run
# Then inside the container:
pnpm build
```

This is useful when you want to see your changes reflected without manually rebuilding. This will also allow you to install a local plugin to test your changes.
This serves the plugin at http://YOUR_IP:5858/ for installation on your Unraid server.

### Package-Specific Building
@@ -7,7 +7,7 @@
"cwd": "/usr/local/unraid-api",
"exec_mode": "fork",
"wait_ready": true,
"listen_timeout": 15000,
"listen_timeout": 30000,
"max_restarts": 10,
"min_uptime": 10000,
"watch": false,
@@ -862,6 +862,38 @@ type DockerMutations {

  """Stop a container"""
  stop(id: PrefixedID!): DockerContainer!

  """Pause (Suspend) a container"""
  pause(id: PrefixedID!): DockerContainer!

  """Unpause (Resume) a container"""
  unpause(id: PrefixedID!): DockerContainer!

  """Remove a container"""
  removeContainer(id: PrefixedID!, withImage: Boolean): Boolean!

  """Update auto-start configuration for Docker containers"""
  updateAutostartConfiguration(entries: [DockerAutostartEntryInput!]!, persistUserPreferences: Boolean): Boolean!

  """Update a container to the latest image"""
  updateContainer(id: PrefixedID!): DockerContainer!

  """Update multiple containers to the latest images"""
  updateContainers(ids: [PrefixedID!]!): [DockerContainer!]!

  """Update all containers that have available updates"""
  updateAllContainers: [DockerContainer!]!
}

input DockerAutostartEntryInput {
  """Docker container identifier"""
  id: PrefixedID!

  """Whether the container should auto-start"""
  autoStart: Boolean!

  """Number of seconds to wait after starting the container"""
  wait: Int
}
type VmMutations {
@@ -944,6 +976,23 @@ input UpdateApiKeyInput {
  permissions: [AddPermissionInput!]
}

"""Customization related mutations"""
type CustomizationMutations {
  """Update the UI theme (writes dynamix.cfg)"""
  setTheme(
    """Theme to apply"""
    theme: ThemeName!
  ): Theme!
}

"""The theme name"""
enum ThemeName {
  azure
  black
  gray
  white
}

"""
Parity check related mutations, WIP, response types and functionality will change
"""
@@ -1042,14 +1091,6 @@ type Theme {
  headerSecondaryTextColor: String
}

"""The theme name"""
enum ThemeName {
  azure
  black
  gray
  white
}

type ExplicitStatusItem {
  name: String!
  updateStatus: UpdateStatus!
@@ -1080,6 +1121,29 @@ enum ContainerPortType {
  UDP
}

type DockerPortConflictContainer {
  id: PrefixedID!
  name: String!
}

type DockerContainerPortConflict {
  privatePort: Port!
  type: ContainerPortType!
  containers: [DockerPortConflictContainer!]!
}

type DockerLanPortConflict {
  lanIpPort: String!
  publicPort: Port
  type: ContainerPortType!
  containers: [DockerPortConflictContainer!]!
}

type DockerPortConflicts {
  containerPorts: [DockerContainerPortConflict!]!
  lanPorts: [DockerLanPortConflict!]!
}

type ContainerHostConfig {
  networkMode: String!
}
@@ -1093,8 +1157,17 @@ type DockerContainer implements Node {
  created: Int!
  ports: [ContainerPort!]!

  """List of LAN-accessible host:port values"""
  lanIpPorts: [String!]

  """Total size of all files in the container (in bytes)"""
  sizeRootFs: BigInt

  """Size of writable layer (in bytes)"""
  sizeRw: BigInt

  """Size of container logs (in bytes)"""
  sizeLog: BigInt
  labels: JSON
  state: ContainerState!
  status: String!
@@ -1102,12 +1175,50 @@ type DockerContainer implements Node {
  networkSettings: JSON
  mounts: [JSON!]
  autoStart: Boolean!

  """Zero-based order in the auto-start list"""
  autoStartOrder: Int

  """Wait time in seconds applied after start"""
  autoStartWait: Int
  templatePath: String

  """Project/Product homepage URL"""
  projectUrl: String

  """Registry/Docker Hub URL"""
  registryUrl: String

  """Support page/thread URL"""
  supportUrl: String

  """Icon URL"""
  iconUrl: String

  """Resolved WebUI URL from template"""
  webUiUrl: String

  """Shell to use for console access (from template)"""
  shell: String

  """Port mappings from template (used when container is not running)"""
  templatePorts: [ContainerPort!]

  """Whether the container is orphaned (no template found)"""
  isOrphaned: Boolean!
  isUpdateAvailable: Boolean
  isRebuildReady: Boolean

  """Whether Tailscale is enabled for this container"""
  tailscaleEnabled: Boolean!

  """Tailscale status for this container (fetched via docker exec)"""
  tailscaleStatus(forceRefresh: Boolean = false): TailscaleStatus
}

enum ContainerState {
  RUNNING
  PAUSED
  EXITED
}
@@ -1129,49 +1240,221 @@ type DockerNetwork implements Node {
  labels: JSON!
}

type DockerContainerLogLine {
  timestamp: DateTime!
  message: String!
}

type DockerContainerLogs {
  containerId: PrefixedID!
  lines: [DockerContainerLogLine!]!

  """
  Cursor that can be passed back through the since argument to continue streaming logs.
  """
  cursor: DateTime
}

type DockerContainerStats {
  id: PrefixedID!

  """CPU Usage Percentage"""
  cpuPercent: Float!

  """Memory Usage String (e.g. 100MB / 1GB)"""
  memUsage: String!

  """Memory Usage Percentage"""
  memPercent: Float!

  """Network I/O String (e.g. 100MB / 1GB)"""
  netIO: String!

  """Block I/O String (e.g. 100MB / 1GB)"""
  blockIO: String!
}

"""Tailscale exit node connection status"""
type TailscaleExitNodeStatus {
  """Whether the exit node is online"""
  online: Boolean!

  """Tailscale IPs of the exit node"""
  tailscaleIps: [String!]
}

"""Tailscale status for a Docker container"""
type TailscaleStatus {
  """Whether Tailscale is online in the container"""
  online: Boolean!

  """Current Tailscale version"""
  version: String

  """Latest available Tailscale version"""
  latestVersion: String

  """Whether a Tailscale update is available"""
  updateAvailable: Boolean!

  """Configured Tailscale hostname"""
  hostname: String

  """Actual Tailscale DNS name"""
  dnsName: String

  """DERP relay code"""
  relay: String

  """DERP relay region name"""
  relayName: String

  """Tailscale IPv4 and IPv6 addresses"""
  tailscaleIps: [String!]

  """Advertised subnet routes"""
  primaryRoutes: [String!]

  """Whether this container is an exit node"""
  isExitNode: Boolean!

  """Status of the connected exit node (if using one)"""
  exitNodeStatus: TailscaleExitNodeStatus

  """Tailscale Serve/Funnel WebUI URL"""
  webUiUrl: String

  """Tailscale key expiry date"""
  keyExpiry: DateTime

  """Days until key expires"""
  keyExpiryDays: Int

  """Whether the Tailscale key has expired"""
  keyExpired: Boolean!

  """Tailscale backend state (Running, NeedsLogin, Stopped, etc.)"""
  backendState: String

  """Authentication URL if Tailscale needs login"""
  authUrl: String
}

type Docker implements Node {
  id: PrefixedID!
  containers(skipCache: Boolean! = false): [DockerContainer!]!
  networks(skipCache: Boolean! = false): [DockerNetwork!]!
  organizer: ResolvedOrganizerV1!
  portConflicts(skipCache: Boolean! = false): DockerPortConflicts!

  """
  Access container logs. Requires specifying a target container id through resolver arguments.
  """
  logs(id: PrefixedID!, since: DateTime, tail: Int): DockerContainerLogs!
  container(id: PrefixedID!): DockerContainer
  organizer(skipCache: Boolean! = false): ResolvedOrganizerV1!
  containerUpdateStatuses: [ExplicitStatusItem!]!
}

type DockerTemplateSyncResult {
  scanned: Int!
  matched: Int!
  skipped: Int!
  errors: [String!]!
}

type ResolvedOrganizerView {
  id: String!
  name: String!
  root: ResolvedOrganizerEntry!
  rootId: String!
  flatEntries: [FlatOrganizerEntry!]!
  prefs: JSON
}

union ResolvedOrganizerEntry = ResolvedOrganizerFolder | OrganizerContainerResource | OrganizerResource

type ResolvedOrganizerFolder {
  id: String!
  type: String!
  name: String!
  children: [ResolvedOrganizerEntry!]!
}

type OrganizerContainerResource {
  id: String!
  type: String!
  name: String!
  meta: DockerContainer
}

type OrganizerResource {
  id: String!
  type: String!
  name: String!
  meta: JSON
}

type ResolvedOrganizerV1 {
  version: Float!
  views: [ResolvedOrganizerView!]!
}

type FlatOrganizerEntry {
  id: String!
  type: String!
  name: String!
  parentId: String
  depth: Float!
  position: Float!
  path: [String!]!
  hasChildren: Boolean!
  childrenIds: [String!]!
  meta: DockerContainer
}

type NotificationCounts {
  info: Int!
  warning: Int!
  alert: Int!
  total: Int!
}

type NotificationSettings {
  position: String!
  expand: Boolean!
  duration: Int!
  max: Int!
}

type NotificationOverview {
  unread: NotificationCounts!
  archive: NotificationCounts!
}

type Notification implements Node {
  id: PrefixedID!

  """Also known as 'event'"""
  title: String!
  subject: String!
  description: String!
  importance: NotificationImportance!
  link: String
  type: NotificationType!

  """ISO Timestamp for when the notification occurred"""
  timestamp: String
  formattedTimestamp: String
}

enum NotificationImportance {
  ALERT
  INFO
  WARNING
}

enum NotificationType {
  UNREAD
  ARCHIVE
}

type Notifications implements Node {
  id: PrefixedID!

  """A cached overview of the notifications in the system & their severity."""
  overview: NotificationOverview!
  list(filter: NotificationFilter!): [Notification!]!

  """
  Deduplicated list of unread warning and alert notifications, sorted latest first.
  """
  warningsAndAlerts: [Notification!]!
  settings: NotificationSettings!
}

input NotificationFilter {
  importance: NotificationImportance
  type: NotificationType!
  offset: Int!
  limit: Int!
}

type FlashBackupStatus {
  """Status message indicating the outcome of the backup initiation."""
  status: String!
@@ -1391,6 +1674,19 @@ type CpuLoad {
  percentSteal: Float!
}

type CpuPackages implements Node {
  id: PrefixedID!

  """Total CPU package power draw (W)"""
  totalPower: Float!

  """Power draw per package (W)"""
  power: [Float!]!

  """Temperature per package (°C)"""
  temp: [Float!]!
}

type CpuUtilization implements Node {
  id: PrefixedID!

@@ -1454,6 +1750,12 @@ type InfoCpu implements Node {

  """CPU feature flags"""
  flags: [String!]

  """
  Per-package array of core/thread pairs, e.g. [[[0,1],[2,3]], [[4,5],[6,7]]]
  """
  topology: [[[Int!]!]!]!
  packages: CpuPackages!
}

type MemoryLayout implements Node {
@@ -1753,60 +2055,6 @@ type Metrics implements Node {
  memory: MemoryUtilization
}

type NotificationCounts {
  info: Int!
  warning: Int!
  alert: Int!
  total: Int!
}

type NotificationOverview {
  unread: NotificationCounts!
  archive: NotificationCounts!
}

type Notification implements Node {
  id: PrefixedID!

  """Also known as 'event'"""
  title: String!
  subject: String!
  description: String!
  importance: NotificationImportance!
  link: String
  type: NotificationType!

  """ISO Timestamp for when the notification occurred"""
  timestamp: String
  formattedTimestamp: String
}

enum NotificationImportance {
  ALERT
  INFO
  WARNING
}

enum NotificationType {
  UNREAD
  ARCHIVE
}

type Notifications implements Node {
  id: PrefixedID!

  """A cached overview of the notifications in the system & their severity."""
  overview: NotificationOverview!
  list(filter: NotificationFilter!): [Notification!]!
}

input NotificationFilter {
  importance: NotificationImportance
  type: NotificationType!
  offset: Int!
  limit: Int!
}

type Owner {
  username: String!
  url: String!
@@ -2416,6 +2664,11 @@ type Mutation {
  """Marks a notification as archived."""
  archiveNotification(id: PrefixedID!): Notification!
  archiveNotifications(ids: [PrefixedID!]!): NotificationOverview!

  """
  Creates a notification if an equivalent unread notification does not already exist.
  """
  notifyIfUnique(input: NotificationData!): Notification
  archiveAll(importance: NotificationImportance): NotificationOverview!

  """Marks a notification as unread."""
@@ -2430,11 +2683,22 @@ type Mutation {
  vm: VmMutations!
  parityCheck: ParityCheckMutations!
  apiKey: ApiKeyMutations!
  customization: CustomizationMutations!
  rclone: RCloneMutations!
  createDockerFolder(name: String!, parentId: String, childrenIds: [String!]): ResolvedOrganizerV1!
  setDockerFolderChildren(folderId: String, childrenIds: [String!]!): ResolvedOrganizerV1!
  deleteDockerEntries(entryIds: [String!]!): ResolvedOrganizerV1!
  moveDockerEntriesToFolder(sourceEntryIds: [String!]!, destinationFolderId: String!): ResolvedOrganizerV1!
  moveDockerItemsToPosition(sourceEntryIds: [String!]!, destinationFolderId: String!, position: Float!): ResolvedOrganizerV1!
  renameDockerFolder(folderId: String!, newName: String!): ResolvedOrganizerV1!
  createDockerFolderWithItems(name: String!, parentId: String, sourceEntryIds: [String!], position: Float): ResolvedOrganizerV1!
  updateDockerViewPreferences(viewId: String = "default", prefs: JSON!): ResolvedOrganizerV1!
  syncDockerTemplatePaths: DockerTemplateSyncResult!

  """
  Reset Docker template mappings to defaults. Use this to recover from corrupted state.
  """
  resetDockerTemplateMappings: Boolean!
  refreshDockerDigests: Boolean!

  """Initiates a flash drive backup using a configured remote."""
@@ -2636,12 +2900,15 @@ input AccessUrlInput {
type Subscription {
  notificationAdded: Notification!
  notificationsOverview: NotificationOverview!
  notificationsWarningsAndAlerts: [Notification!]!
  ownerSubscription: Owner!
  serversSubscription: Server!
  parityHistorySubscription: ParityCheck!
  arraySubscription: UnraidArray!
  dockerContainerStats: DockerContainerStats!
  logFile(path: String!): LogFileContent!
  systemMetricsCpu: CpuUtilization!
  systemMetricsCpuTelemetry: CpuPackages!
  systemMetricsMemory: MemoryUtilization!
  upsUpdates: UPSDevice!
}
@@ -12,8 +12,13 @@ default:
@deploy remote:
    ./scripts/deploy-dev.sh {{remote}}

# watches typescript files and restarts dev server on changes
@watch:
    watchexec -e ts -r -- pnpm dev

alias b := build
alias d := deploy
alias w := watch

sync-env server:
    rsync -avz --progress --stats -e ssh .env* root@{{server}}:/usr/local/unraid-api
@@ -1,6 +1,6 @@
{
  "name": "@unraid/api",
  "version": "4.25.0",
  "version": "4.29.2",
  "main": "src/cli/index.ts",
  "type": "module",
  "corepack": {
@@ -30,6 +30,8 @@
  "// GraphQL Codegen": "",
  "codegen": "graphql-codegen --config codegen.ts",
  "codegen:watch": "graphql-codegen --config codegen.ts --watch",
  "// Internationalization": "",
  "i18n:extract": "node ./scripts/extract-translations.mjs",
  "// Code Quality": "",
  "lint": "eslint --config .eslintrc.ts src/",
  "lint:fix": "eslint --fix --config .eslintrc.ts src/",
@@ -102,6 +104,7 @@
  "escape-html": "1.0.3",
  "execa": "9.6.0",
  "exit-hook": "4.0.0",
  "fast-xml-parser": "^5.3.0",
  "fastify": "5.5.0",
  "filenamify": "7.0.0",
  "fs-extra": "11.3.1",
@@ -114,6 +117,7 @@
  "graphql-subscriptions": "3.0.0",
  "graphql-tag": "2.12.6",
  "graphql-ws": "6.0.6",
  "html-entities": "^2.6.0",
  "ini": "5.0.0",
  "ip": "2.0.1",
  "jose": "6.0.13",
@@ -7,7 +7,7 @@ import { exit } from 'process';
import type { PackageJson } from 'type-fest';
import { $, cd } from 'zx';

import { getDeploymentVersion } from './get-deployment-version.js';
import { getDeploymentVersion } from '@app/../scripts/get-deployment-version.js';

type ApiPackageJson = PackageJson & {
    version: string;
@@ -83,6 +83,10 @@ try {
        if (parsedPackageJson.dependencies?.[dep]) {
            delete parsedPackageJson.dependencies[dep];
        }
        // Also strip from peerDependencies (npm doesn't understand workspace: protocol)
        if (parsedPackageJson.peerDependencies?.[dep]) {
            delete parsedPackageJson.peerDependencies[dep];
        }
    });
}
162
api/scripts/extract-translations.mjs
Normal file
@@ -0,0 +1,162 @@
#!/usr/bin/env node

import { readFile, writeFile } from 'node:fs/promises';
import path from 'node:path';
import { glob } from 'glob';
import ts from 'typescript';

const projectRoot = process.cwd();
const sourcePatterns = 'src/**/*.{ts,js}';
const ignorePatterns = [
    '**/__tests__/**',
    '**/__test__/**',
    '**/*.spec.ts',
    '**/*.spec.js',
    '**/*.test.ts',
    '**/*.test.js',
];

const englishLocaleFile = path.resolve(projectRoot, 'src/i18n/en.json');

const identifierTargets = new Set(['t', 'translate']);
const propertyTargets = new Set([
    'i18n.t',
    'i18n.translate',
    'ctx.t',
    'this.translate',
    'this.i18n.translate',
    'this.i18n.t',
]);

function getPropertyChain(node) {
    if (ts.isIdentifier(node)) {
        return node.text;
    }
    if (ts.isPropertyAccessExpression(node)) {
        const left = getPropertyChain(node.expression);
        if (!left) return undefined;
        return `${left}.${node.name.text}`;
    }
    return undefined;
}

function extractLiteral(node) {
    if (ts.isStringLiteralLike(node)) {
        return node.text;
    }
    if (ts.isNoSubstitutionTemplateLiteral(node)) {
        return node.text;
    }
    return undefined;
}

function collectKeysFromSource(sourceFile) {
    const keys = new Set();

    function visit(node) {
        if (ts.isCallExpression(node)) {
            const expr = node.expression;
            let matches = false;

            if (ts.isIdentifier(expr) && identifierTargets.has(expr.text)) {
                matches = true;
            } else if (ts.isPropertyAccessExpression(expr)) {
                const chain = getPropertyChain(expr);
                if (chain && propertyTargets.has(chain)) {
                    matches = true;
                }
            }

            if (matches) {
                const [firstArg] = node.arguments;
                if (firstArg) {
                    const literal = extractLiteral(firstArg);
                    if (literal) {
                        keys.add(literal);
                    }
                }
            }
        }

        ts.forEachChild(node, visit);
    }

    visit(sourceFile);
    return keys;
}

async function loadEnglishCatalog() {
    try {
        const raw = await readFile(englishLocaleFile, 'utf8');
        const parsed = raw.trim() ? JSON.parse(raw) : {};
        if (typeof parsed !== 'object' || Array.isArray(parsed)) {
            throw new Error('English locale file must contain a JSON object.');
        }
        return parsed;
    } catch (error) {
        if (error && error.code === 'ENOENT') {
            return {};
        }
        throw error;
    }
}

async function ensureEnglishCatalog(keys) {
    const existingCatalog = await loadEnglishCatalog();
    const existingKeys = new Set(Object.keys(existingCatalog));

    let added = 0;
    const combinedKeys = new Set([...existingKeys, ...keys]);
    const sortedKeys = Array.from(combinedKeys).sort((a, b) => a.localeCompare(b));
    const nextCatalog = {};

    for (const key of sortedKeys) {
        if (Object.prototype.hasOwnProperty.call(existingCatalog, key)) {
            nextCatalog[key] = existingCatalog[key];
        } else {
            nextCatalog[key] = key;
            added += 1;
        }
    }

    const nextJson = `${JSON.stringify(nextCatalog, null, 2)}\n`;
    const existingJson = JSON.stringify(existingCatalog, null, 2) + '\n';

    if (nextJson !== existingJson) {
        await writeFile(englishLocaleFile, nextJson, 'utf8');
    }

    return added;
}

async function main() {
    const files = await glob(sourcePatterns, {
        cwd: projectRoot,
        ignore: ignorePatterns,
        absolute: true,
    });

    const collectedKeys = new Set();

    await Promise.all(
        files.map(async (file) => {
            const content = await readFile(file, 'utf8');
            const sourceFile = ts.createSourceFile(file, content, ts.ScriptTarget.Latest, true);
            const keys = collectKeysFromSource(sourceFile);
            keys.forEach((key) => collectedKeys.add(key));
        }),
    );

    const added = await ensureEnglishCatalog(collectedKeys);

    if (added === 0) {
        console.log('[i18n] No new backend translation keys detected.');
    } else {
        console.log(`[i18n] Added ${added} key(s) to src/i18n/en.json.`);
    }
}

main().catch((error) => {
    console.error('[i18n] Failed to extract backend translations.', error);
    process.exitCode = 1;
});
@@ -4,23 +4,18 @@ import {
    getBannerPathIfPresent,
    getCasePathIfPresent,
} from '@app/core/utils/images/image-file-helpers.js';
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
import { store } from '@app/store/index.js';
import { loadDynamixConfig } from '@app/store/index.js';

test('get case path returns expected result', async () => {
    await expect(getCasePathIfPresent()).resolves.toContain('/dev/dynamix/case-model.png');
});

test('get banner path returns null (state unloaded)', async () => {
    await expect(getBannerPathIfPresent()).resolves.toMatchInlineSnapshot('null');
});

test('get banner path returns the banner (state loaded)', async () => {
    await store.dispatch(loadDynamixConfigFile()).unwrap();
    loadDynamixConfig();
    await expect(getBannerPathIfPresent()).resolves.toContain('/dev/dynamix/banner.png');
});

test('get banner path returns null when no banner (state loaded)', async () => {
    await store.dispatch(loadDynamixConfigFile()).unwrap();
    loadDynamixConfig();
    await expect(getBannerPathIfPresent('notabanner.png')).resolves.toMatchInlineSnapshot('null');
});
@@ -6,6 +6,7 @@ exports[`Returns paths 1`] = `
  "unraid-api-base",
  "unraid-data",
  "docker-autostart",
  "docker-userprefs",
  "docker-socket",
  "rclone-socket",
  "parity-checks",
@@ -11,6 +11,7 @@ test('Returns paths', async () => {
    'unraid-api-base': '/usr/local/unraid-api/',
    'unraid-data': expect.stringContaining('api/dev/data'),
    'docker-autostart': '/var/lib/docker/unraid-autostart',
    'docker-userprefs': '/boot/config/plugins/dockerMan/userprefs.cfg',
    'docker-socket': '/var/run/docker.sock',
    'parity-checks': expect.stringContaining('api/dev/states/parity-checks.log'),
    htpasswd: '/etc/nginx/htpasswd',
151
api/src/__test__/store/watch/registration-watch.test.ts
Normal file
@@ -0,0 +1,151 @@
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { StateFileKey } from '@app/store/types.js';
|
||||
import { RegistrationType } from '@app/unraid-api/graph/resolvers/registration/registration.model.js';

// Mock the store module
vi.mock('@app/store/index.js', () => ({
    store: {
        dispatch: vi.fn(),
    },
    getters: {
        emhttp: vi.fn(),
    },
}));

// Mock the emhttp module
vi.mock('@app/store/modules/emhttp.js', () => ({
    loadSingleStateFile: vi.fn((key) => ({ type: 'emhttp/load-single-state-file', payload: key })),
}));

// Mock the registration module
vi.mock('@app/store/modules/registration.js', () => ({
    loadRegistrationKey: vi.fn(() => ({ type: 'registration/load-registration-key' })),
}));

// Mock the logger
vi.mock('@app/core/log.js', () => ({
    keyServerLogger: {
        info: vi.fn(),
        debug: vi.fn(),
    },
}));

describe('reloadVarIniWithRetry', () => {
    let store: { dispatch: ReturnType<typeof vi.fn> };
    let getters: { emhttp: ReturnType<typeof vi.fn> };
    let loadSingleStateFile: ReturnType<typeof vi.fn>;

    beforeEach(async () => {
        vi.useFakeTimers();

        const storeModule = await import('@app/store/index.js');
        const emhttpModule = await import('@app/store/modules/emhttp.js');

        store = storeModule.store as unknown as typeof store;
        getters = storeModule.getters as unknown as typeof getters;
        loadSingleStateFile = emhttpModule.loadSingleStateFile as unknown as typeof loadSingleStateFile;

        vi.clearAllMocks();
    });

    afterEach(() => {
        vi.useRealTimers();
    });

    it('returns early when registration state changes on first retry', async () => {
        // Initial state is TRIAL
        getters.emhttp
            .mockReturnValueOnce({ var: { regTy: RegistrationType.TRIAL } }) // First call (beforeState)
            .mockReturnValueOnce({ var: { regTy: RegistrationType.UNLEASHED } }); // After first reload

        const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');

        const promise = reloadVarIniWithRetry();

        // Advance past the first delay (500ms)
        await vi.advanceTimersByTimeAsync(500);
        await promise;

        // Should only dispatch once since state changed
        expect(store.dispatch).toHaveBeenCalledTimes(1);
        expect(loadSingleStateFile).toHaveBeenCalledWith(StateFileKey.var);
    });

    it('retries up to maxRetries when state does not change', async () => {
        // State never changes
        getters.emhttp.mockReturnValue({ var: { regTy: RegistrationType.TRIAL } });

        const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');

        const promise = reloadVarIniWithRetry(3);

        // Advance through all retries: 500ms, 1000ms, 2000ms
        await vi.advanceTimersByTimeAsync(500);
        await vi.advanceTimersByTimeAsync(1000);
        await vi.advanceTimersByTimeAsync(2000);
        await promise;

        // Should dispatch 3 times (maxRetries)
        expect(store.dispatch).toHaveBeenCalledTimes(3);
    });

    it('stops retrying when state changes on second attempt', async () => {
        getters.emhttp
            .mockReturnValueOnce({ var: { regTy: RegistrationType.TRIAL } }) // beforeState
            .mockReturnValueOnce({ var: { regTy: RegistrationType.TRIAL } }) // After first reload (no change)
            .mockReturnValueOnce({ var: { regTy: RegistrationType.UNLEASHED } }); // After second reload (changed!)

        const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');

        const promise = reloadVarIniWithRetry(3);

        // First retry
        await vi.advanceTimersByTimeAsync(500);
        // Second retry
        await vi.advanceTimersByTimeAsync(1000);
        await promise;

        // Should dispatch twice - stopped after state changed
        expect(store.dispatch).toHaveBeenCalledTimes(2);
    });

    it('handles undefined regTy gracefully', async () => {
        getters.emhttp.mockReturnValue({ var: {} });

        const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');

        const promise = reloadVarIniWithRetry(1);

        await vi.advanceTimersByTimeAsync(500);
        await promise;

        // Should still dispatch even with undefined regTy
        expect(store.dispatch).toHaveBeenCalledTimes(1);
    });

    it('uses exponential backoff delays', async () => {
        getters.emhttp.mockReturnValue({ var: { regTy: RegistrationType.TRIAL } });

        const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');

        const promise = reloadVarIniWithRetry(3);

        // At 0ms, no dispatch yet
        expect(store.dispatch).toHaveBeenCalledTimes(0);

        // At 500ms, first dispatch
        await vi.advanceTimersByTimeAsync(500);
        expect(store.dispatch).toHaveBeenCalledTimes(1);

        // At 1500ms (500 + 1000), second dispatch
        await vi.advanceTimersByTimeAsync(1000);
        expect(store.dispatch).toHaveBeenCalledTimes(2);

        // At 3500ms (500 + 1000 + 2000), third dispatch
        await vi.advanceTimersByTimeAsync(2000);
        expect(store.dispatch).toHaveBeenCalledTimes(3);

        await promise;
    });
});
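For reference, here is a minimal sketch of the reloadVarIniWithRetry behavior these tests pin down: a 500/1000/2000ms exponential backoff and a before/after comparison of regTy. This is inferred from the test expectations above, not copied from the shipped source, which may differ in detail:

```ts
// Hypothetical sketch inferred from the tests above; not the actual implementation.
import { getters, store } from '@app/store/index.js';
import { loadSingleStateFile } from '@app/store/modules/emhttp.js';
import { StateFileKey } from '@app/store/types.js';

export const reloadVarIniWithRetry = async (maxRetries = 3): Promise<void> => {
    const beforeState = getters.emhttp()?.var?.regTy;
    for (let attempt = 0; attempt < maxRetries; attempt++) {
        // 500ms, 1000ms, 2000ms, ... doubling each attempt
        await new Promise((resolve) => setTimeout(resolve, 500 * 2 ** attempt));
        await store.dispatch(loadSingleStateFile(StateFileKey.var));
        if (getters.emhttp()?.var?.regTy !== beforeState) {
            return; // registration state changed, stop retrying
        }
    }
};
```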
234 api/src/common/compare-semver-version.spec.ts Normal file
@@ -0,0 +1,234 @@
import { eq, gt, gte, lt, lte, parse } from 'semver';
import { describe, expect, it } from 'vitest';

import { compareVersions } from '@app/common/compare-semver-version.js';

describe('compareVersions', () => {
    describe('basic comparisons', () => {
        it('should return true when current version is greater than compared (gte)', () => {
            const current = parse('7.3.0')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gte)).toBe(true);
        });

        it('should return true when current version equals compared (gte)', () => {
            const current = parse('7.2.0')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gte)).toBe(true);
        });

        it('should return false when current version is less than compared (gte)', () => {
            const current = parse('7.1.0')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gte)).toBe(false);
        });

        it('should return true when current version is less than compared (lte)', () => {
            const current = parse('7.1.0')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, lte)).toBe(true);
        });

        it('should return true when current version equals compared (lte)', () => {
            const current = parse('7.2.0')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, lte)).toBe(true);
        });

        it('should return false when current version is greater than compared (lte)', () => {
            const current = parse('7.3.0')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, lte)).toBe(false);
        });

        it('should return true when current version is greater than compared (gt)', () => {
            const current = parse('7.3.0')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gt)).toBe(true);
        });

        it('should return false when current version equals compared (gt)', () => {
            const current = parse('7.2.0')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gt)).toBe(false);
        });

        it('should return true when current version is less than compared (lt)', () => {
            const current = parse('7.1.0')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, lt)).toBe(true);
        });

        it('should return false when current version equals compared (lt)', () => {
            const current = parse('7.2.0')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, lt)).toBe(false);
        });

        it('should return true when versions are equal (eq)', () => {
            const current = parse('7.2.0')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, eq)).toBe(true);
        });

        it('should return false when versions are not equal (eq)', () => {
            const current = parse('7.3.0')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, eq)).toBe(false);
        });
    });

    describe('prerelease handling - current has prerelease, compared is stable', () => {
        it('should return true for gte when current prerelease > stable (same base)', () => {
            const current = parse('7.2.0-beta.1')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gte)).toBe(true);
        });

        it('should return true for gt when current prerelease > stable (same base)', () => {
            const current = parse('7.2.0-beta.1')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gt)).toBe(true);
        });

        it('should return false for lte when current prerelease < stable (same base)', () => {
            const current = parse('7.2.0-beta.1')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, lte)).toBe(false);
        });

        it('should return false for lt when current prerelease < stable (same base)', () => {
            const current = parse('7.2.0-beta.1')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, lt)).toBe(false);
        });

        it('should return false for eq when current prerelease != stable (same base)', () => {
            const current = parse('7.2.0-beta.1')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, eq)).toBe(false);
        });
    });

    describe('prerelease handling - current is stable, compared has prerelease', () => {
        it('should use normal comparison when current is stable and compared has prerelease', () => {
            const current = parse('7.2.0')!;
            const compared = parse('7.2.0-beta.1')!;
            expect(compareVersions(current, compared, gte)).toBe(true);
        });

        it('should use normal comparison for lte when current is stable and compared has prerelease', () => {
            const current = parse('7.2.0')!;
            const compared = parse('7.2.0-beta.1')!;
            expect(compareVersions(current, compared, lte)).toBe(false);
        });
    });

    describe('prerelease handling - both have prerelease', () => {
        it('should use normal comparison when both versions have prerelease', () => {
            const current = parse('7.2.0-beta.2')!;
            const compared = parse('7.2.0-beta.1')!;
            expect(compareVersions(current, compared, gte)).toBe(true);
        });

        it('should use normal comparison for lte when both have prerelease', () => {
            const current = parse('7.2.0-beta.1')!;
            const compared = parse('7.2.0-beta.2')!;
            expect(compareVersions(current, compared, lte)).toBe(true);
        });

        it('should use normal comparison when prerelease versions are equal', () => {
            const current = parse('7.2.0-beta.1')!;
            const compared = parse('7.2.0-beta.1')!;
            expect(compareVersions(current, compared, eq)).toBe(true);
        });
    });

    describe('prerelease handling - different base versions', () => {
        it('should use normal comparison when base versions differ (current prerelease)', () => {
            const current = parse('7.3.0-beta.1')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gte)).toBe(true);
        });

        it('should use normal comparison when base versions differ (current prerelease, less)', () => {
            const current = parse('7.1.0-beta.1')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gte)).toBe(false);
        });
    });

    describe('includePrerelease flag', () => {
        it('should apply special prerelease handling when includePrerelease is true', () => {
            const current = parse('7.2.0-beta.1')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gte, { includePrerelease: true })).toBe(true);
        });

        it('should skip special prerelease handling when includePrerelease is false', () => {
            const current = parse('7.2.0-beta.1')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gte, { includePrerelease: false })).toBe(false);
        });

        it('should default to includePrerelease true', () => {
            const current = parse('7.2.0-beta.1')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gte)).toBe(true);
        });
    });

    describe('edge cases', () => {
        it('should handle patch version differences', () => {
            const current = parse('7.2.1')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gte)).toBe(true);
        });

        it('should handle minor version differences', () => {
            const current = parse('7.3.0')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gte)).toBe(true);
        });

        it('should handle major version differences', () => {
            const current = parse('8.0.0')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gte)).toBe(true);
        });

        it('should handle complex prerelease tags', () => {
            const current = parse('7.2.0-beta.2.4')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gte)).toBe(true);
        });

        it('should handle alpha prerelease tags', () => {
            const current = parse('7.2.0-alpha.1')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gte)).toBe(true);
        });

        it('should handle rc prerelease tags', () => {
            const current = parse('7.2.0-rc.1')!;
            const compared = parse('7.2.0')!;
            expect(compareVersions(current, compared, gte)).toBe(true);
        });
    });

    describe('comparison function edge cases', () => {
        it('should handle custom comparison functions that are not gte/lte/gt/lt', () => {
            const current = parse('7.2.0-beta.1')!;
            const compared = parse('7.2.0')!;
            const customCompare = (a: typeof current, b: typeof compared) => a.compare(b) === 1;
            expect(compareVersions(current, compared, customCompare)).toBe(false);
        });

        it('should fall through to normal comparison for unknown functions with prerelease', () => {
            const current = parse('7.2.0-beta.1')!;
            const compared = parse('7.2.0')!;
            const customCompare = () => false;
            expect(compareVersions(current, compared, customCompare)).toBe(false);
        });
    });
});
44 api/src/common/compare-semver-version.ts Normal file
@@ -0,0 +1,44 @@
import type { SemVer } from 'semver';
import { gt, gte, lt, lte } from 'semver';

/**
 * Shared version comparison logic with special handling for prerelease versions.
 *
 * When base versions are equal and current version has a prerelease tag while compared doesn't:
 * - For gte/gt: prerelease is considered greater than stable (returns true)
 * - For lte/lt: prerelease is considered less than stable (returns false)
 * - For eq: prerelease is not equal to stable (returns false)
 *
 * @param currentVersion - The current Unraid version (SemVer object)
 * @param comparedVersion - The version to compare against (SemVer object)
 * @param compareFn - The comparison function (e.g., gte, lte, lt, gt, eq)
 * @param includePrerelease - Whether to include special prerelease handling
 * @returns The result of the comparison
 */
export const compareVersions = (
    currentVersion: SemVer,
    comparedVersion: SemVer,
    compareFn: (a: SemVer, b: SemVer) => boolean,
    { includePrerelease = true }: { includePrerelease?: boolean } = {}
): boolean => {
    if (includePrerelease) {
        const baseCurrent = `${currentVersion.major}.${currentVersion.minor}.${currentVersion.patch}`;
        const baseCompared = `${comparedVersion.major}.${comparedVersion.minor}.${comparedVersion.patch}`;

        if (baseCurrent === baseCompared) {
            const currentHasPrerelease = currentVersion.prerelease.length > 0;
            const comparedHasPrerelease = comparedVersion.prerelease.length > 0;

            if (currentHasPrerelease && !comparedHasPrerelease) {
                if (compareFn === gte || compareFn === gt) {
                    return true;
                }
                if (compareFn === lte || compareFn === lt) {
                    return false;
                }
            }
        }
    }

    return compareFn(currentVersion, comparedVersion);
};
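A quick usage sketch of the special case described in the comment above, with expected results taken from the spec file:

```ts
import { gte, lte, parse } from 'semver';

import { compareVersions } from '@app/common/compare-semver-version.js';

const current = parse('7.2.0-beta.1')!;
const stable = parse('7.2.0')!;

// Same base version: the prerelease build counts as "at least" the stable release...
compareVersions(current, stable, gte); // true
// ...but never as older than it.
compareVersions(current, stable, lte); // false
// Opt out to get plain semver ordering, where a prerelease sorts below its stable release.
compareVersions(current, stable, gte, { includePrerelease: false }); // false
```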
60 api/src/common/get-unraid-version-sync.ts Normal file
@@ -0,0 +1,60 @@
import type { SemVer } from 'semver';
import { coerce } from 'semver';

import { compareVersions } from '@app/common/compare-semver-version.js';
import { fileExistsSync } from '@app/core/utils/files/file-exists.js';
import { parseConfig } from '@app/core/utils/misc/parse-config.js';

type UnraidVersionIni = {
    version?: string;
};

/**
 * Synchronously reads the Unraid version from /etc/unraid-version.
 * @returns The Unraid version string, or 'unknown' if the file cannot be read
 */
export const getUnraidVersionSync = (): string => {
    const versionPath = '/etc/unraid-version';

    if (!fileExistsSync(versionPath)) {
        return 'unknown';
    }

    try {
        const versionIni = parseConfig<UnraidVersionIni>({ filePath: versionPath, type: 'ini' });
        return versionIni.version || 'unknown';
    } catch {
        return 'unknown';
    }
};

/**
 * Compares the Unraid version against a specified version using a comparison function.
 * @param compareFn - The comparison function from semver (e.g., lt, gte, lte, gt, eq)
 * @param version - The version to compare against (e.g., '7.3.0')
 * @param options - Options for the comparison
 * @returns The result of the comparison, or false if the version cannot be determined
 */
export const compareUnraidVersionSync = (
    compareFn: (a: SemVer, b: SemVer) => boolean,
    version: string,
    { includePrerelease = true }: { includePrerelease?: boolean } = {}
): boolean => {
    const currentVersion = getUnraidVersionSync();
    if (currentVersion === 'unknown') {
        return false;
    }

    try {
        const current = coerce(currentVersion, { includePrerelease });
        const compared = coerce(version, { includePrerelease });

        if (!current || !compared) {
            return false;
        }

        return compareVersions(current, compared, compareFn, { includePrerelease });
    } catch {
        return false;
    }
};
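Typical call sites would look something like the following (the feature names here are illustrative, not from the source):

```ts
import { gte, lt } from 'semver';

import { compareUnraidVersionSync } from '@app/common/get-unraid-version-sync.js';

// Reads /etc/unraid-version synchronously; both return false if the version is unknown.
const hasNewEnoughOs = compareUnraidVersionSync(gte, '7.3.0');
const needsLegacyPath = compareUnraidVersionSync(lt, '7.0.0');
```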
12 api/src/connect-plugin-cleanup.ts Normal file
@@ -0,0 +1,12 @@
import { existsSync } from 'node:fs';

/**
 * Local filesystem and env checks stay synchronous so we can branch at module load.
 * @returns True if the Connect Unraid plugin is installed, false otherwise.
 */
export const isConnectPluginInstalled = () => {
    if (process.env.SKIP_CONNECT_PLUGIN_CHECK === 'true') {
        return true;
    }
    return existsSync('/boot/config/plugins/dynamix.unraid.net.plg');
};
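A hedged example of the module-load branching the comment refers to (the call site is hypothetical):

```ts
import { isConnectPluginInstalled } from '@app/connect-plugin-cleanup.js';

// Decide synchronously, before any async bootstrap work, whether
// Connect-specific code paths should be wired up at all.
if (isConnectPluginInstalled()) {
    // e.g. register Connect cleanup tasks
}
```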
@@ -1,7 +1,7 @@
import pino from 'pino';
import pretty from 'pino-pretty';

import { API_VERSION, LOG_LEVEL, LOG_TYPE, SUPPRESS_LOGS } from '@app/environment.js';
import { API_VERSION, LOG_LEVEL, LOG_TYPE, PATHS_LOGS_FILE, SUPPRESS_LOGS } from '@app/environment.js';

export const levels = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'] as const;

@@ -15,18 +15,24 @@ const nullDestination = pino.destination({
    },
});

const LOG_TRANSPORT = process.env.LOG_TRANSPORT ?? 'file';
const useConsole = LOG_TRANSPORT === 'console';

export const logDestination =
    process.env.SUPPRESS_LOGS === 'true' ? nullDestination : pino.destination();
    // Since PM2 captures stdout and writes to the log file, we should not colorize stdout
    // to avoid ANSI escape codes in the log file
    process.env.SUPPRESS_LOGS === 'true'
        ? nullDestination
        : useConsole
          ? pino.destination(1) // stdout
          : pino.destination({ dest: PATHS_LOGS_FILE, mkdir: true });

const stream = SUPPRESS_LOGS
    ? nullDestination
    : LOG_TYPE === 'pretty'
      ? pretty({
            singleLine: true,
            hideObject: false,
            colorize: false, // No colors since PM2 writes stdout to file
            colorizeObjects: false,
            colorize: useConsole, // Enable colors when outputting to console
            colorizeObjects: useConsole,
            levelFirst: false,
            ignore: 'hostname,pid',
            destination: logDestination,
@@ -34,10 +40,10 @@ const stream = SUPPRESS_LOGS
        customPrettifiers: {
            time: (timestamp: string | object) => `[${timestamp}`,
            level: (_logLevel: string | object, _key: string, log: any, extras: any) => {
                // Use label instead of labelColorized for non-colored output
                const { label } = extras;
                const { label, labelColorized } = extras;
                const context = log.context || log.logger || 'app';
                return `${label} ${context}]`;
                // Use colorized label when outputting to console
                return `${useConsole ? labelColorized : label} ${context}]`;
            },
        },
        messageFormat: (log: any, messageKey: string) => {
@@ -93,6 +93,9 @@ interface Notify {
    system: string;
    version: string;
    docker_update: string;
    expand?: string | boolean;
    duration?: string | number;
    max?: string | number;
}

interface Ssmtp {
66 api/src/core/utils/__test__/safe-mode.test.ts Normal file
@@ -0,0 +1,66 @@
import { afterEach, describe, expect, it, vi } from 'vitest';

import { isSafeModeEnabled } from '@app/core/utils/safe-mode.js';
import { store } from '@app/store/index.js';
import * as stateFileLoader from '@app/store/services/state-file-loader.js';

describe('isSafeModeEnabled', () => {
    afterEach(() => {
        vi.restoreAllMocks();
    });

    it('returns the safe mode flag already present in the store', () => {
        const baseState = store.getState();
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            emhttp: {
                ...baseState.emhttp,
                var: {
                    ...(baseState.emhttp?.var ?? {}),
                    safeMode: true,
                },
            },
        });
        const loaderSpy = vi.spyOn(stateFileLoader, 'loadStateFileSync');

        expect(isSafeModeEnabled()).toBe(true);
        expect(loaderSpy).not.toHaveBeenCalled();
    });

    it('falls back to the synchronous loader when store state is missing', () => {
        const baseState = store.getState();
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            emhttp: {
                ...baseState.emhttp,
                var: {
                    ...(baseState.emhttp?.var ?? {}),
                    safeMode: undefined as unknown as boolean,
                } as typeof baseState.emhttp.var,
            } as typeof baseState.emhttp,
        } as typeof baseState);
        vi.spyOn(stateFileLoader, 'loadStateFileSync').mockReturnValue({
            ...(baseState.emhttp?.var ?? {}),
            safeMode: true,
        } as any);

        expect(isSafeModeEnabled()).toBe(true);
    });

    it('defaults to false when loader cannot provide state', () => {
        const baseState = store.getState();
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            emhttp: {
                ...baseState.emhttp,
                var: {
                    ...(baseState.emhttp?.var ?? {}),
                    safeMode: undefined as unknown as boolean,
                } as typeof baseState.emhttp.var,
            } as typeof baseState.emhttp,
        } as typeof baseState);
        vi.spyOn(stateFileLoader, 'loadStateFileSync').mockReturnValue(null);

        expect(isSafeModeEnabled()).toBe(false);
    });
});
231 api/src/core/utils/misc/__test__/timeout-budget.test.ts Normal file
@@ -0,0 +1,231 @@
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { TimeoutBudget } from '@app/core/utils/misc/timeout-budget.js';

describe('TimeoutBudget', () => {
    beforeEach(() => {
        vi.useFakeTimers();
    });

    afterEach(() => {
        vi.useRealTimers();
    });

    describe('constructor', () => {
        it('initializes with the given budget', () => {
            const budget = new TimeoutBudget(10000);
            expect(budget.remaining()).toBe(10000);
            expect(budget.elapsed()).toBe(0);
        });
    });

    describe('remaining', () => {
        it('returns full budget immediately after construction', () => {
            const budget = new TimeoutBudget(5000);
            expect(budget.remaining()).toBe(5000);
        });

        it('decreases as time passes', () => {
            const budget = new TimeoutBudget(5000);

            vi.advanceTimersByTime(1000);
            expect(budget.remaining()).toBe(4000);

            vi.advanceTimersByTime(2000);
            expect(budget.remaining()).toBe(2000);
        });

        it('never returns negative values', () => {
            const budget = new TimeoutBudget(1000);

            vi.advanceTimersByTime(5000); // Well past the budget
            expect(budget.remaining()).toBe(0);
        });

        it('returns zero when budget is exactly exhausted', () => {
            const budget = new TimeoutBudget(1000);

            vi.advanceTimersByTime(1000);
            expect(budget.remaining()).toBe(0);
        });
    });

    describe('elapsed', () => {
        it('returns zero immediately after construction', () => {
            const budget = new TimeoutBudget(5000);
            expect(budget.elapsed()).toBe(0);
        });

        it('increases as time passes', () => {
            const budget = new TimeoutBudget(5000);

            vi.advanceTimersByTime(1000);
            expect(budget.elapsed()).toBe(1000);

            vi.advanceTimersByTime(500);
            expect(budget.elapsed()).toBe(1500);
        });

        it('continues increasing past the budget limit', () => {
            const budget = new TimeoutBudget(1000);

            vi.advanceTimersByTime(2000);
            expect(budget.elapsed()).toBe(2000);
        });
    });

    describe('getTimeout', () => {
        it('returns maxMs when plenty of budget remains', () => {
            const budget = new TimeoutBudget(10000);
            expect(budget.getTimeout(2000)).toBe(2000);
        });

        it('returns maxMs when budget minus reserve is sufficient', () => {
            const budget = new TimeoutBudget(10000);
            expect(budget.getTimeout(2000, 5000)).toBe(2000);
        });

        it('caps timeout to available budget minus reserve', () => {
            const budget = new TimeoutBudget(10000);
            vi.advanceTimersByTime(5000); // 5000ms remaining

            // Want 2000ms but reserve 4000ms, only 1000ms available
            expect(budget.getTimeout(2000, 4000)).toBe(1000);
        });

        it('caps timeout to remaining budget when no reserve', () => {
            const budget = new TimeoutBudget(1000);
            vi.advanceTimersByTime(800); // 200ms remaining

            expect(budget.getTimeout(500)).toBe(200);
        });

        it('returns minimum of 100ms even when budget is exhausted', () => {
            const budget = new TimeoutBudget(1000);
            vi.advanceTimersByTime(2000); // Budget exhausted

            expect(budget.getTimeout(500)).toBe(100);
        });

        it('returns minimum of 100ms when reserve exceeds remaining', () => {
            const budget = new TimeoutBudget(5000);
            vi.advanceTimersByTime(4000); // 1000ms remaining

            // Reserve 2000ms but only 1000ms remaining
            expect(budget.getTimeout(500, 2000)).toBe(100);
        });

        it('uses default reserve of 0 when not specified', () => {
            const budget = new TimeoutBudget(1000);
            vi.advanceTimersByTime(500); // 500ms remaining

            expect(budget.getTimeout(1000)).toBe(500); // Capped to remaining
        });
    });

    describe('hasTimeFor', () => {
        it('returns true when enough time remains', () => {
            const budget = new TimeoutBudget(5000);
            expect(budget.hasTimeFor(3000)).toBe(true);
        });

        it('returns true when exactly enough time remains', () => {
            const budget = new TimeoutBudget(5000);
            expect(budget.hasTimeFor(5000)).toBe(true);
        });

        it('returns false when not enough time remains', () => {
            const budget = new TimeoutBudget(5000);
            expect(budget.hasTimeFor(6000)).toBe(false);
        });

        it('accounts for elapsed time', () => {
            const budget = new TimeoutBudget(5000);
            vi.advanceTimersByTime(3000); // 2000ms remaining

            expect(budget.hasTimeFor(2000)).toBe(true);
            expect(budget.hasTimeFor(3000)).toBe(false);
        });

        it('returns false when budget is exhausted', () => {
            const budget = new TimeoutBudget(1000);
            vi.advanceTimersByTime(2000);

            expect(budget.hasTimeFor(1)).toBe(false);
        });

        it('returns true for zero required time', () => {
            const budget = new TimeoutBudget(1000);
            vi.advanceTimersByTime(2000); // Budget exhausted

            expect(budget.hasTimeFor(0)).toBe(true);
        });
    });

    describe('integration scenarios', () => {
        it('simulates a typical startup sequence', () => {
            const budget = new TimeoutBudget(13000); // 13 second budget
            const BOOTSTRAP_RESERVE = 8000;
            const MAX_OP_TIMEOUT = 2000;

            // First operation - should get full 2000ms
            const op1Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            expect(op1Timeout).toBe(2000);

            // Simulate operation taking 500ms
            vi.advanceTimersByTime(500);

            // Second operation - still have plenty of budget
            const op2Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            expect(op2Timeout).toBe(2000);

            // Simulate operation taking 1000ms
            vi.advanceTimersByTime(1000);

            // Third operation
            const op3Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            expect(op3Timeout).toBe(2000);

            // Simulate slow operation taking 2000ms
            vi.advanceTimersByTime(2000);

            // Now 3500ms elapsed, 9500ms remaining
            // After reserve, only 1500ms available - less than max
            const op4Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            expect(op4Timeout).toBe(1500);

            // Simulate operation completing
            vi.advanceTimersByTime(1000);

            // Bootstrap phase - use all remaining time
            const bootstrapTimeout = budget.remaining();
            expect(bootstrapTimeout).toBe(8500);
            expect(budget.hasTimeFor(8000)).toBe(true);
        });

        it('handles worst-case scenario where all operations timeout', () => {
            const budget = new TimeoutBudget(13000);
            const BOOTSTRAP_RESERVE = 8000;
            const MAX_OP_TIMEOUT = 2000;

            // Each operation times out at its limit
            // Available for operations: 13000 - 8000 = 5000ms

            // Op 1: gets 2000ms, times out
            budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            vi.advanceTimersByTime(2000);

            // Op 2: gets 2000ms, times out
            budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            vi.advanceTimersByTime(2000);

            // Op 3: only 1000ms available (5000 - 4000), times out
            const op3Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            expect(op3Timeout).toBe(1000);
            vi.advanceTimersByTime(1000);

            // Bootstrap: should still have 8000ms
            expect(budget.remaining()).toBe(8000);
        });
    });
});
65 api/src/core/utils/misc/__test__/with-timeout.test.ts Normal file
@@ -0,0 +1,65 @@
import { describe, expect, it } from 'vitest';

import { withTimeout } from '@app/core/utils/misc/with-timeout.js';

describe('withTimeout', () => {
    it('resolves when promise completes before timeout', async () => {
        const promise = Promise.resolve('success');
        const result = await withTimeout(promise, 1000, 'testOp');
        expect(result).toBe('success');
    });

    it('resolves with correct value for delayed promise within timeout', async () => {
        const promise = new Promise<number>((resolve) => setTimeout(() => resolve(42), 50));
        const result = await withTimeout(promise, 1000, 'testOp');
        expect(result).toBe(42);
    });

    it('rejects when promise takes longer than timeout', async () => {
        const promise = new Promise<string>((resolve) => setTimeout(() => resolve('late'), 500));
        await expect(withTimeout(promise, 50, 'slowOp')).rejects.toThrow('slowOp timed out after 50ms');
    });

    it('includes operation name in timeout error message', async () => {
        const promise = new Promise<void>(() => {}); // Never resolves
        await expect(withTimeout(promise, 10, 'myCustomOperation')).rejects.toThrow(
            'myCustomOperation timed out after 10ms'
        );
    });

    it('propagates rejection from the original promise', async () => {
        const promise = Promise.reject(new Error('original error'));
        await expect(withTimeout(promise, 1000, 'testOp')).rejects.toThrow('original error');
    });

    it('resolves immediately for already-resolved promises', async () => {
        const promise = Promise.resolve('immediate');
        const start = Date.now();
        const result = await withTimeout(promise, 1000, 'testOp');
        const elapsed = Date.now() - start;

        expect(result).toBe('immediate');
        expect(elapsed).toBeLessThan(50); // Should be nearly instant
    });

    it('works with zero timeout (immediately times out for pending promises)', async () => {
        const promise = new Promise<void>(() => {}); // Never resolves
        await expect(withTimeout(promise, 0, 'zeroTimeout')).rejects.toThrow(
            'zeroTimeout timed out after 0ms'
        );
    });

    it('preserves the type of the resolved value', async () => {
        interface TestType {
            id: number;
            name: string;
        }
        const testObj: TestType = { id: 1, name: 'test' };
        const promise = Promise.resolve(testObj);

        const result = await withTimeout(promise, 1000, 'testOp');

        expect(result.id).toBe(1);
        expect(result.name).toBe('test');
    });
});
@@ -2,7 +2,7 @@ import { AppError } from '@app/core/errors/app-error.js';
import { getters } from '@app/store/index.js';

interface DockerError extends NodeJS.ErrnoException {
    address: string;
    address?: string;
}

/**
70 api/src/core/utils/misc/timeout-budget.ts Normal file
@@ -0,0 +1,70 @@
/**
 * Tracks remaining time budget to ensure we don't exceed external timeouts (e.g., PM2's listen_timeout).
 *
 * This class helps coordinate multiple async operations by:
 * - Tracking elapsed time from construction
 * - Calculating dynamic timeouts based on remaining budget
 * - Reserving time for critical operations (like server bootstrap)
 *
 * @example
 * ```typescript
 * const budget = new TimeoutBudget(15000); // 15 second total budget
 *
 * // Each operation gets a timeout capped by remaining budget
 * await withTimeout(loadConfig(), budget.getTimeout(2000, 8000), 'loadConfig');
 * await withTimeout(loadState(), budget.getTimeout(2000, 8000), 'loadState');
 *
 * // Bootstrap gets all remaining time
 * await withTimeout(bootstrap(), budget.remaining(), 'bootstrap');
 *
 * console.log(`Completed in ${budget.elapsed()}ms`);
 * ```
 */
export class TimeoutBudget {
    private startTime: number;
    private budgetMs: number;

    /**
     * Creates a new startup budget tracker.
     * @param budgetMs Total time budget in milliseconds
     */
    constructor(budgetMs: number) {
        this.startTime = Date.now();
        this.budgetMs = budgetMs;
    }

    /**
     * Returns remaining time in milliseconds.
     * Never returns negative values.
     */
    remaining(): number {
        return Math.max(0, this.budgetMs - (Date.now() - this.startTime));
    }

    /**
     * Returns elapsed time in milliseconds since construction.
     */
    elapsed(): number {
        return Date.now() - this.startTime;
    }

    /**
     * Returns timeout for an operation, capped by remaining budget.
     *
     * @param maxMs Maximum timeout for this operation
     * @param reserveMs Time to reserve for future operations (e.g., server bootstrap)
     * @returns Timeout in milliseconds (minimum 100ms to avoid instant failures)
     */
    getTimeout(maxMs: number, reserveMs: number = 0): number {
        const available = this.remaining() - reserveMs;
        return Math.max(100, Math.min(maxMs, available));
    }

    /**
     * Checks if there's enough time remaining for an operation.
     * @param requiredMs Time required in milliseconds
     */
    hasTimeFor(requiredMs: number): boolean {
        return this.remaining() >= requiredMs;
    }
}
25 api/src/core/utils/misc/with-timeout.ts Normal file
@@ -0,0 +1,25 @@
/**
 * Wraps a promise with a timeout to prevent hangs.
 * If the operation takes longer than timeoutMs, it rejects with a timeout error.
 *
 * @param promise The promise to wrap with a timeout
 * @param timeoutMs Maximum time in milliseconds before timing out
 * @param operationName Name of the operation for the error message
 * @returns The result of the promise if it completes in time
 * @throws Error if the operation times out
 */
export const withTimeout = <T>(
    promise: Promise<T>,
    timeoutMs: number,
    operationName: string
): Promise<T> => {
    return Promise.race([
        promise,
        new Promise<never>((_, reject) =>
            setTimeout(
                () => reject(new Error(`${operationName} timed out after ${timeoutMs}ms`)),
                timeoutMs
            )
        ),
    ]);
};
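A minimal usage sketch; the URL is a placeholder. Note that Promise.race only stops waiting — it does not cancel the underlying operation, which keeps running in the background:

```ts
import { withTimeout } from '@app/core/utils/misc/with-timeout.js';

const healthCheck = async (): Promise<Response> => {
    // Rejects with "healthCheck timed out after 2000ms" if the request hangs.
    return withTimeout(fetch('http://localhost/health'), 2_000, 'healthCheck');
};
```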
19 api/src/core/utils/network.ts Normal file
@@ -0,0 +1,19 @@
import { getters } from '@app/store/index.js';

/**
 * Returns the LAN IPv4 address reported by emhttp, if available.
 */
export function getLanIp(): string {
    const emhttp = getters.emhttp();
    const lanFromNetworks = emhttp?.networks?.[0]?.ipaddr?.[0];
    if (lanFromNetworks) {
        return lanFromNetworks;
    }

    const lanFromNginx = emhttp?.nginx?.lanIp;
    if (lanFromNginx) {
        return lanFromNginx;
    }

    return '';
}
17 api/src/core/utils/safe-mode.ts Normal file
@@ -0,0 +1,17 @@
import { store } from '@app/store/index.js';
import { loadStateFileSync } from '@app/store/services/state-file-loader.js';
import { StateFileKey } from '@app/store/types.js';

export const isSafeModeEnabled = (): boolean => {
    const safeModeFromStore = store.getState().emhttp?.var?.safeMode;
    if (typeof safeModeFromStore === 'boolean') {
        return safeModeFromStore;
    }

    const varState = loadStateFileSync(StateFileKey.var);
    if (varState) {
        return Boolean(varState.safeMode);
    }

    return false;
};
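A hypothetical call site, assuming callers use the flag to gate optional work:

```ts
import { isSafeModeEnabled } from '@app/core/utils/safe-mode.js';

// Falls back to reading var.ini synchronously when the store has not loaded yet.
if (!isSafeModeEnabled()) {
    // start non-essential subsystems, plugins, etc.
}
```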
@@ -111,5 +111,10 @@ export const PATHS_CONFIG_MODULES =
export const PATHS_LOCAL_SESSION_FILE =
    process.env.PATHS_LOCAL_SESSION_FILE ?? '/var/run/unraid-api/local-session';

export const PATHS_DOCKER_TEMPLATES = process.env.PATHS_DOCKER_TEMPLATES?.split(',') ?? [
    '/boot/config/plugins/dockerMan/templates-user',
    '/boot/config/plugins/dockerMan/templates',
];

/** feature flag for the upcoming docker release */
export const ENABLE_NEXT_DOCKER_RELEASE = process.env.ENABLE_NEXT_DOCKER_RELEASE === 'true';
1 api/src/i18n/ar.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/bn.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/ca.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/cs.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/da.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/de.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/en.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/es.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/fr.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/hi.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/hr.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/hu.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/it.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/ja.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/ko.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/lv.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/nl.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/no.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/pl.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/pt.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/ro.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/ru.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/sv.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/uk.json Normal file
@@ -0,0 +1 @@
{}
1 api/src/i18n/zh.json Normal file
@@ -0,0 +1 @@
{}
113 api/src/index.ts
@@ -15,28 +15,38 @@ import { WebSocket } from 'ws';

import { logger } from '@app/core/log.js';
import { fileExistsSync } from '@app/core/utils/files/file-exists.js';
import { TimeoutBudget } from '@app/core/utils/misc/timeout-budget.js';
import { withTimeout } from '@app/core/utils/misc/with-timeout.js';
import { getServerIdentifier } from '@app/core/utils/server-identifier.js';
import { environment, PATHS_CONFIG_MODULES, PORT } from '@app/environment.js';
import * as envVars from '@app/environment.js';
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
import { shutdownApiEvent } from '@app/store/actions/shutdown-api-event.js';
import { store } from '@app/store/index.js';
import { loadDynamixConfig, store } from '@app/store/index.js';
import { startMiddlewareListeners } from '@app/store/listeners/listener-middleware.js';
import { loadStateFiles } from '@app/store/modules/emhttp.js';
import { loadRegistrationKey } from '@app/store/modules/registration.js';
import { setupDynamixConfigWatch } from '@app/store/watch/dynamix-config-watch.js';
import { setupRegistrationKeyWatch } from '@app/store/watch/registration-watch.js';
import { StateManager } from '@app/store/watch/state-watch.js';

let server: NestFastifyApplication<RawServerDefault> | null = null;

// PM2 listen_timeout is 15 seconds (ecosystem.config.json)
// We use 13 seconds as our total budget to ensure our timeout triggers before PM2 kills us
const TOTAL_STARTUP_BUDGET_MS = 30_000;
// Reserve time for the NestJS bootstrap (the most critical and time-consuming operation)
const BOOTSTRAP_RESERVED_MS = 20_000;
// Maximum time for any single pre-bootstrap operation
const MAX_OPERATION_TIMEOUT_MS = 5_000;

const unlinkUnixPort = () => {
    if (isNaN(parseInt(PORT, 10))) {
        if (fileExistsSync(PORT)) unlinkSync(PORT);
    }
};

export const viteNodeApp = async () => {
export const viteNodeApp = async (): Promise<NestFastifyApplication<RawServerDefault>> => {
    const budget = new TimeoutBudget(TOTAL_STARTUP_BUDGET_MS);

    try {
        await import('json-bigint-patch');
        environment.IS_MAIN_PROCESS = true;
@@ -44,15 +54,15 @@ export const viteNodeApp = async () => {
        /**------------------------------------------------------------------------
         * Attaching getServerIdentifier to globalThis
         *
         * getServerIdentifier is tightly coupled to the deprecated redux store,
         * which we don't want to share with other packages or plugins.
         *
         * At the same time, we need to use it in @unraid/shared as a building block,
         * where it's used & available outside of NestJS's DI context.
         *
         * Attaching to globalThis is a temporary solution to avoid refactoring
         * config sync & management outside of NestJS's DI context.
         *
         * Plugin authors should import getServerIdentifier from @unraid/shared instead,
         * to avoid breaking changes to their code.
         *------------------------------------------------------------------------**/
@@ -60,7 +70,18 @@ export const viteNodeApp = async () => {
        logger.info('ENV %o', envVars);
        logger.info('PATHS %o', store.getState().paths);

        await mkdir(PATHS_CONFIG_MODULES, { recursive: true });
        // Note: we use logger.info for checkpoints instead of a lower log level
        // to ensure emission during an unraid server's boot,
        // where the log level will be set to INFO by default.

        // Create config directory
        try {
            await mkdir(PATHS_CONFIG_MODULES, { recursive: true });
            logger.info('Config directory ready');
        } catch (error) {
            logger.error(error, 'Failed to create config directory');
            throw error;
        }

        const cacheable = new CacheableLookup();

@@ -70,32 +91,73 @@ export const viteNodeApp = async () => {
        cacheable.install(https.globalAgent);

        // Load emhttp state into store
        await store.dispatch(loadStateFiles());
        try {
            const timeout = budget.getTimeout(MAX_OPERATION_TIMEOUT_MS, BOOTSTRAP_RESERVED_MS);
            await withTimeout(store.dispatch(loadStateFiles()), timeout, 'loadStateFiles');
            logger.info('Emhttp state loaded');
        } catch (error) {
            logger.error(error, 'Failed to load emhttp state files');
            logger.warn('Continuing with default state');
        }

        // Load initial registration key into store
        await store.dispatch(loadRegistrationKey());
        try {
            const timeout = budget.getTimeout(MAX_OPERATION_TIMEOUT_MS, BOOTSTRAP_RESERVED_MS);
            await withTimeout(store.dispatch(loadRegistrationKey()), timeout, 'loadRegistrationKey');
            logger.info('Registration key loaded');
        } catch (error) {
            logger.error(error, 'Failed to load registration key');
            logger.warn('Continuing without registration key');
        }

        // Load my dynamix config file into store
        await store.dispatch(loadDynamixConfigFile());
        try {
            loadDynamixConfig();
            logger.info('Dynamix config loaded');
        } catch (error) {
            logger.error(error, 'Failed to load dynamix config');
            logger.warn('Continuing with default dynamix config');
        }

        // Start listening to file updates
        StateManager.getInstance();
        try {
            StateManager.getInstance();
            logger.info('State manager initialized');
        } catch (error) {
            logger.error(error, 'Failed to initialize state manager');
            logger.warn('Continuing without state watching');
        }

        // Start listening to key file changes
        setupRegistrationKeyWatch();

        // Start listening to dynamix config file changes
        setupDynamixConfigWatch();
        try {
            setupRegistrationKeyWatch();
            logger.info('Registration key watch active');
        } catch (error) {
            logger.error(error, 'Failed to setup registration key watch');
            logger.warn('Continuing without key file watching');
        }

        // If port is unix socket, delete old socket before starting http server
        unlinkUnixPort();

        startMiddlewareListeners();

        // Start webserver
        const { bootstrapNestServer } = await import('@app/unraid-api/main.js');

        server = await bootstrapNestServer();
        // Start webserver - use all remaining budget
        try {
            const bootstrapTimeout = budget.remaining();
            if (bootstrapTimeout < 1000) {
                logger.warn(
                    `Insufficient startup budget remaining (${bootstrapTimeout}ms) for NestJS bootstrap`
                );
            }
            logger.info('Bootstrapping NestJS server (budget: %dms)...', bootstrapTimeout);
            const { bootstrapNestServer } = await import('@app/unraid-api/main.js');
            server = await withTimeout(bootstrapNestServer(), bootstrapTimeout, 'bootstrapNestServer');
            logger.info('Startup complete in %dms', budget.elapsed());
        } catch (error) {
            logger.error(error, 'Failed to start NestJS server');
            throw error; // This is critical - must rethrow to trigger graceful exit
        }

        asyncExitHook(
            async (signal) => {
@@ -108,8 +170,10 @@ export const viteNodeApp = async () => {

                gracefulExit();
            },
            { wait: 9999 }
            { wait: 10_000 }
        );

        return server;
    } catch (error: unknown) {
        if (error instanceof Error) {
            logger.error(error, 'API-ERROR');
@@ -120,8 +184,9 @@ export const viteNodeApp = async () => {
            await server?.close?.();
        }
        shutdownApiEvent();
        // Kill application
        // Kill application - gracefulExit calls process.exit but TS doesn't know it never returns
        gracefulExit(1);
        throw new Error('Unreachable');
    }
};
@@ -1,12 +1,9 @@
import { F_OK } from 'constants';
import { access } from 'fs/promises';

import { createAsyncThunk } from '@reduxjs/toolkit';
import { createTtlMemoizedLoader } from '@unraid/shared';

import type { RecursivePartial } from '@app/types/index.js';
import { type DynamixConfig } from '@app/core/types/ini.js';
import { fileExistsSync } from '@app/core/utils/files/file-exists.js';
import { parseConfig } from '@app/core/utils/misc/parse-config.js';
import { type RecursiveNullable, type RecursivePartial } from '@app/types/index.js';
import { batchProcess } from '@app/utils.js';

/**
 * Loads a configuration file from disk, parses it to a RecursivePartial of the provided type, and returns it.
@@ -16,11 +13,8 @@ import { batchProcess } from '@app/utils.js';
 * @param path The path to the configuration file on disk.
 * @returns A parsed RecursivePartial of the provided type.
 */
async function loadConfigFile<ConfigType>(path: string): Promise<RecursivePartial<ConfigType>> {
    const fileIsAccessible = await access(path, F_OK)
        .then(() => true)
        .catch(() => false);
    return fileIsAccessible
function loadConfigFileSync<ConfigType>(path: string): RecursivePartial<ConfigType> {
    return fileExistsSync(path)
        ? parseConfig<RecursivePartial<ConfigType>>({
              filePath: path,
              type: 'ini',
@@ -28,21 +22,40 @@ async function loadConfigFile<ConfigType>(path: string): Promise<RecursivePartial<ConfigType>> {
        : {};
}

/**
 * Load the dynamix.cfg into the store.
 *
 * Note: If the file doesn't exist this will fall back to default values.
 */
export const loadDynamixConfigFile = createAsyncThunk<
    RecursiveNullable<RecursivePartial<DynamixConfig>>,
    string | undefined
>('config/load-dynamix-config-file', async (filePath) => {
    if (filePath) {
        return loadConfigFile<DynamixConfig>(filePath);
    }
    const store = await import('@app/store/index.js');
    const paths = store.getters.paths()['dynamix-config'];
    const { data: configs } = await batchProcess(paths, (path) => loadConfigFile<DynamixConfig>(path));
    const [defaultConfig = {}, customConfig = {}] = configs;
    return { ...defaultConfig, ...customConfig };
type ConfigPaths = readonly (string | undefined | null)[];
const CACHE_WINDOW_MS = 250;

const memoizedConfigLoader = createTtlMemoizedLoader<
    RecursivePartial<DynamixConfig>,
    ConfigPaths,
    string
>({
    ttlMs: CACHE_WINDOW_MS,
    getCacheKey: (configPaths: ConfigPaths): string => JSON.stringify(configPaths),
    load: (configPaths: ConfigPaths) => {
        const validPaths = configPaths.filter((path): path is string => Boolean(path));
        if (validPaths.length === 0) {
            return {};
        }
        const configFiles = validPaths.map((path) => loadConfigFileSync<DynamixConfig>(path));
        return configFiles.reduce<RecursivePartial<DynamixConfig>>(
            (accumulator, configFile) => ({
                ...accumulator,
                ...configFile,
            }),
            {}
        );
    },
});

/**
 * Loads dynamix config from disk with TTL caching.
 *
 * @param configPaths - Array of config file paths to load and merge
 * @returns Merged config object from all valid paths
 */
export const loadDynamixConfigFromDiskSync = (
    configPaths: readonly (string | undefined | null)[]
): RecursivePartial<DynamixConfig> => {
    return memoizedConfigLoader.get(configPaths);
};
@@ -1,7 +1,11 @@
import { configureStore } from '@reduxjs/toolkit';

import { logger } from '@app/core/log.js';
import { loadDynamixConfigFromDiskSync } from '@app/store/actions/load-dynamix-config-file.js';
import { listenerMiddleware } from '@app/store/listeners/listener-middleware.js';
import { updateDynamixConfig } from '@app/store/modules/dynamix.js';
import { rootReducer } from '@app/store/root-reducer.js';
import { FileLoadStatus } from '@app/store/types.js';

export const store = configureStore({
    reducer: rootReducer,
@@ -15,8 +19,36 @@ export type RootState = ReturnType<typeof store.getState>;
export type AppDispatch = typeof store.dispatch;
export type ApiStore = typeof store;

// loadDynamixConfig is located here and not in the actions/load-dynamix-config-file.js file because it needs to access the store,
// and injecting it seemed circular and convoluted for this use case.
/**
 * Loads the dynamix config into the store.
 * Can be called multiple times - uses TTL caching internally.
 * @returns The loaded dynamix config.
 */
export const loadDynamixConfig = () => {
    const configPaths = store.getState().paths['dynamix-config'] ?? [];
    try {
        const config = loadDynamixConfigFromDiskSync(configPaths);
        store.dispatch(
            updateDynamixConfig({
                ...config,
                status: FileLoadStatus.LOADED,
            })
        );
    } catch (error) {
        logger.error(error, 'Failed to load dynamix config from disk');
        store.dispatch(
            updateDynamixConfig({
                status: FileLoadStatus.FAILED_LOADING,
            })
        );
    }
    return store.getState().dynamix;
};

export const getters = {
    dynamix: () => store.getState().dynamix,
    dynamix: () => loadDynamixConfig(),
    emhttp: () => store.getState().emhttp,
    paths: () => store.getState().paths,
    registration: () => store.getState().registration,
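With this change, getters.dynamix() re-reads the config on access, but the TTL memoization in loadDynamixConfigFromDiskSync (250ms window, see above) keeps repeated reads cheap:

```ts
import { getters } from '@app/store/index.js';

// Both calls land inside the 250ms cache window, so the config files
// are parsed from disk at most once between them.
const first = getters.dynamix();
const second = getters.dynamix();
```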
@@ -2,7 +2,6 @@ import type { PayloadAction } from '@reduxjs/toolkit';
import { createSlice } from '@reduxjs/toolkit';

import { type DynamixConfig } from '@app/core/types/ini.js';
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
import { FileLoadStatus } from '@app/store/types.js';
import { RecursivePartial } from '@app/types/index.js';

@@ -22,24 +21,6 @@ export const dynamix = createSlice({
            return Object.assign(state, action.payload);
        },
    },
    extraReducers(builder) {
        builder.addCase(loadDynamixConfigFile.pending, (state) => {
            state.status = FileLoadStatus.LOADING;
        });

        builder.addCase(loadDynamixConfigFile.fulfilled, (state, action) => {
            return {
                ...(action.payload as DynamixConfig),
                status: FileLoadStatus.LOADED,
            };
        });

        builder.addCase(loadDynamixConfigFile.rejected, (state, action) => {
            Object.assign(state, action.payload, {
                status: FileLoadStatus.FAILED_LOADING,
            });
        });
    },
});

export const { updateDynamixConfig } = dynamix.actions;
@@ -163,6 +163,18 @@ export const loadStateFiles = createAsyncThunk<
    return state;
});

const stateFieldKeyMap: Record<StateFileKey, keyof SliceState> = {
    [StateFileKey.var]: 'var',
    [StateFileKey.devs]: 'devices',
    [StateFileKey.network]: 'networks',
    [StateFileKey.nginx]: 'nginx',
    [StateFileKey.shares]: 'shares',
    [StateFileKey.disks]: 'disks',
    [StateFileKey.users]: 'users',
    [StateFileKey.sec]: 'smbShares',
    [StateFileKey.sec_nfs]: 'nfsShares',
};

export const emhttp = createSlice({
    name: 'emhttp',
    initialState,
@@ -175,7 +187,8 @@ export const emhttp = createSlice({
        }>
    ) {
        const { field } = action.payload;
        return Object.assign(state, { [field]: action.payload.state });
        const targetField = stateFieldKeyMap[field] ?? (field as keyof SliceState);
        return Object.assign(state, { [targetField]: action.payload.state });
    },
},
extraReducers(builder) {

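A short illustration of why the key map above exists (values taken from the hunk; the failure mode is inferred): the StateFileKey members mirror the .ini file names, while the emhttp slice stores several of them under different field names.

    // e.g. devs.ini parses into state.devices, sec_nfs.ini into state.nfsShares
    const target = stateFieldKeyMap[StateFileKey.devs]; // 'devices'
    // Before this change, Object.assign(state, { [field]: ... }) presumably wrote to
    // state['devs'], which is not a field of SliceState, so state.devices never updated.
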
@@ -20,6 +20,7 @@ const initialState = {
        process.env.PATHS_UNRAID_DATA ?? ('/boot/config/plugins/dynamix.my.servers/data/' as const)
    ),
    'docker-autostart': '/var/lib/docker/unraid-autostart' as const,
    'docker-userprefs': '/boot/config/plugins/dockerMan/userprefs.cfg' as const,
    'docker-socket': '/var/run/docker.sock' as const,
    'rclone-socket': resolvePath(process.env.PATHS_RCLONE_SOCKET ?? ('/var/run/rclone.socket' as const)),
    'parity-checks': resolvePath(

api/src/store/services/__test__/state-file-loader.test.ts (new file, 81 lines)
@@ -0,0 +1,81 @@
import { mkdtempSync, readFileSync, rmSync, writeFileSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { join } from 'node:path';

import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { store } from '@app/store/index.js';
import { loadStateFileSync } from '@app/store/services/state-file-loader.js';
import { StateFileKey } from '@app/store/types.js';

const VAR_FIXTURE = readFileSync(new URL('../../../../dev/states/var.ini', import.meta.url), 'utf-8');

const writeVarFixture = (dir: string, safeMode: 'yes' | 'no') => {
    const content = VAR_FIXTURE.replace(/safeMode="(yes|no)"/, `safeMode="${safeMode}"`);
    writeFileSync(join(dir, `${StateFileKey.var}.ini`), content);
};

describe('loadStateFileSync', () => {
    let tempDir: string;
    let baseState: ReturnType<typeof store.getState>;

    beforeEach(() => {
        tempDir = mkdtempSync(join(tmpdir(), 'state-file-'));
        baseState = store.getState();
    });

    afterEach(() => {
        vi.restoreAllMocks();
        rmSync(tempDir, { recursive: true, force: true });
    });

    it('loads var.ini, updates the store, and returns the parsed state', () => {
        writeVarFixture(tempDir, 'yes');
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            paths: {
                ...baseState.paths,
                states: tempDir,
            },
        });
        const dispatchSpy = vi.spyOn(store, 'dispatch').mockImplementation((action) => action as any);

        const result = loadStateFileSync(StateFileKey.var);

        expect(result?.safeMode).toBe(true);
        expect(dispatchSpy).toHaveBeenCalledWith(
            expect.objectContaining({
                type: 'emhttp/updateEmhttpState',
                payload: {
                    field: StateFileKey.var,
                    state: expect.objectContaining({ safeMode: true }),
                },
            })
        );
    });

    it('returns null when the states path is missing', () => {
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            paths: undefined,
        } as any);
        const dispatchSpy = vi.spyOn(store, 'dispatch');

        expect(loadStateFileSync(StateFileKey.var)).toBeNull();
        expect(dispatchSpy).not.toHaveBeenCalled();
    });

    it('returns null when the requested state file cannot be found', () => {
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            paths: {
                ...baseState.paths,
                states: tempDir,
            },
        });
        const dispatchSpy = vi.spyOn(store, 'dispatch');

        expect(loadStateFileSync(StateFileKey.var)).toBeNull();
        expect(dispatchSpy).not.toHaveBeenCalled();
    });
});

api/src/store/services/state-file-loader.ts (new file, 81 lines)
@@ -0,0 +1,81 @@
import { join } from 'node:path';

import type { SliceState } from '@app/store/modules/emhttp.js';
import type { StateFileToIniParserMap } from '@app/store/types.js';
import { parseConfig } from '@app/core/utils/misc/parse-config.js';
import { store } from '@app/store/index.js';
import { updateEmhttpState } from '@app/store/modules/emhttp.js';
import { parse as parseDevices } from '@app/store/state-parsers/devices.js';
import { parse as parseNetwork } from '@app/store/state-parsers/network.js';
import { parse as parseNfs } from '@app/store/state-parsers/nfs.js';
import { parse as parseNginx } from '@app/store/state-parsers/nginx.js';
import { parse as parseShares } from '@app/store/state-parsers/shares.js';
import { parse as parseSlots } from '@app/store/state-parsers/slots.js';
import { parse as parseSmb } from '@app/store/state-parsers/smb.js';
import { parse as parseUsers } from '@app/store/state-parsers/users.js';
import { parse as parseVar } from '@app/store/state-parsers/var.js';
import { StateFileKey } from '@app/store/types.js';

type ParserReturnMap = {
    [StateFileKey.var]: ReturnType<typeof parseVar>;
    [StateFileKey.devs]: ReturnType<typeof parseDevices>;
    [StateFileKey.network]: ReturnType<typeof parseNetwork>;
    [StateFileKey.nginx]: ReturnType<typeof parseNginx>;
    [StateFileKey.shares]: ReturnType<typeof parseShares>;
    [StateFileKey.disks]: ReturnType<typeof parseSlots>;
    [StateFileKey.users]: ReturnType<typeof parseUsers>;
    [StateFileKey.sec]: ReturnType<typeof parseSmb>;
    [StateFileKey.sec_nfs]: ReturnType<typeof parseNfs>;
};

const PARSER_MAP: { [K in StateFileKey]: StateFileToIniParserMap[K] } = {
    [StateFileKey.var]: parseVar,
    [StateFileKey.devs]: parseDevices,
    [StateFileKey.network]: parseNetwork,
    [StateFileKey.nginx]: parseNginx,
    [StateFileKey.shares]: parseShares,
    [StateFileKey.disks]: parseSlots,
    [StateFileKey.users]: parseUsers,
    [StateFileKey.sec]: parseSmb,
    [StateFileKey.sec_nfs]: parseNfs,
};

/**
 * Synchronously loads an emhttp state file, updates the Redux store slice, and returns the parsed state.
 *
 * Designed for bootstrap contexts (CLI, plugin loading, etc.) where dispatching the async thunks is
 * impractical but we still need authoritative emhttp state from disk.
 */
export const loadStateFileSync = <K extends StateFileKey>(
    stateFileKey: K
): ParserReturnMap[K] | null => {
    const state = store.getState();
    const statesDirectory = state.paths?.states;

    if (!statesDirectory) {
        return null;
    }

    const filePath = join(statesDirectory, `${stateFileKey}.ini`);

    try {
        const parser = PARSER_MAP[stateFileKey] as StateFileToIniParserMap[K];
        const rawConfig = parseConfig<Record<string, unknown>>({
            filePath,
            type: 'ini',
        });
        const config = rawConfig as Parameters<StateFileToIniParserMap[K]>[0];
        const parsed = (parser as (input: any) => ParserReturnMap[K])(config);

        store.dispatch(
            updateEmhttpState({
                field: stateFileKey,
                state: parsed as Partial<SliceState[keyof SliceState]>,
            })
        );

        return parsed;
    } catch (error) {
        return null;
    }
};

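A minimal bootstrap-style usage sketch for the loader above (not part of the diff; imports as declared in the new file):

    import { loadStateFileSync } from '@app/store/services/state-file-loader.js';
    import { StateFileKey } from '@app/store/types.js';

    // Returns the parsed state (and dispatches it into the store), or null if
    // the states path is unset or the file cannot be read.
    const varState = loadStateFileSync(StateFileKey.var);
    if (varState?.safeMode) {
        // react to safe mode synchronously, without waiting on the async thunks
    }
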
@@ -1,17 +0,0 @@
import { watch } from 'chokidar';

import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
import { getters, store } from '@app/store/index.js';

export const setupDynamixConfigWatch = () => {
    const configPath = getters.paths()?.['dynamix-config'];

    // Update store when cfg changes
    watch(configPath, {
        persistent: true,
        ignoreInitial: true,
    }).on('change', async () => {
        // Load updated dynamix config file into store
        await store.dispatch(loadDynamixConfigFile());
    });
};

@@ -1,17 +1,51 @@
import { watch } from 'chokidar';

import { CHOKIDAR_USEPOLLING } from '@app/environment.js';
import { store } from '@app/store/index.js';
import { keyServerLogger } from '@app/core/log.js';
import { getters, store } from '@app/store/index.js';
import { loadSingleStateFile } from '@app/store/modules/emhttp.js';
import { loadRegistrationKey } from '@app/store/modules/registration.js';
import { StateFileKey } from '@app/store/types.js';

/**
 * Reloads var.ini with retry logic to handle timing issues with emhttpd.
 * When a key file changes, emhttpd needs time to process it and update var.ini.
 * This function retries loading var.ini until the registration state changes
 * or max retries are exhausted.
 */
export const reloadVarIniWithRetry = async (maxRetries = 3): Promise<void> => {
    const beforeState = getters.emhttp().var?.regTy;

    for (let attempt = 0; attempt < maxRetries; attempt++) {
        const delay = 500 * Math.pow(2, attempt); // 500ms, 1s, 2s
        await new Promise((resolve) => setTimeout(resolve, delay));

        await store.dispatch(loadSingleStateFile(StateFileKey.var));

        const afterState = getters.emhttp().var?.regTy;
        if (beforeState !== afterState) {
            keyServerLogger.info('Registration state updated: %s -> %s', beforeState, afterState);
            return;
        }
        keyServerLogger.debug('Retry %d: var.ini regTy still %s', attempt + 1, afterState);
    }
    keyServerLogger.debug('var.ini regTy unchanged after %d retries (may be expected)', maxRetries);
};

export const setupRegistrationKeyWatch = () => {
    // IMPORTANT: /boot/config is on FAT32 flash drive which does NOT support inotify
    // Must use polling to detect file changes on FAT32 filesystems
    watch('/boot/config', {
        persistent: true,
        ignoreInitial: true,
        ignored: (path: string) => !path.endsWith('.key'),
        usePolling: CHOKIDAR_USEPOLLING === true,
    }).on('all', async () => {
        // Load updated key into store
        usePolling: true, // Required for FAT32 - inotify doesn't work
        interval: 5000, // Poll every 5 seconds (balance between responsiveness and CPU usage)
    }).on('all', async (event, path) => {
        keyServerLogger.info('Key file %s: %s', event, path);

        await store.dispatch(loadRegistrationKey());

        // Reload var.ini to get updated registration metadata from emhttpd
        await reloadVarIniWithRetry();
    });
};

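For reference, the retry schedule above is exponential: 500 ms, then 1 s, then 2 s, so a full miss across the default three retries costs about 3.5 s of waiting. A tiny sketch of the same formula:

    // delays for maxRetries = 3: [500, 1000, 2000] (ms), ~3.5 s worst case in total
    const delays = Array.from({ length: 3 }, (_, attempt) => 500 * Math.pow(2, attempt));
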
api/src/types/jsonforms-i18n.d.ts (new file, vendored, 40 lines)
@@ -0,0 +1,40 @@
import '@jsonforms/core/lib/models/jsonSchema4';
import '@jsonforms/core/lib/models/jsonSchema7';
import '@jsonforms/core/src/models/jsonSchema4';
import '@jsonforms/core/src/models/jsonSchema7';

declare module '@jsonforms/core/lib/models/jsonSchema4' {
    interface JsonSchema4 {
        i18n?: string;
    }
}

declare module '@jsonforms/core/lib/models/jsonSchema7' {
    interface JsonSchema7 {
        i18n?: string;
    }
}

declare module '@jsonforms/core/src/models/jsonSchema4' {
    interface JsonSchema4 {
        i18n?: string;
    }
}

declare module '@jsonforms/core/src/models/jsonSchema7' {
    interface JsonSchema7 {
        i18n?: string;
    }
}

declare module '@jsonforms/core/lib/models/jsonSchema4.js' {
    interface JsonSchema4 {
        i18n?: string;
    }
}

declare module '@jsonforms/core/lib/models/jsonSchema7.js' {
    interface JsonSchema7 {
        i18n?: string;
    }
}
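With this augmentation in place, a plain JSON Schema property may carry an i18n translation key and still type-check. A minimal sketch (hypothetical object; the key is taken from the API key form later in this diff):

    const nameProperty = {
        type: 'string',
        i18n: 'jsonforms.apiKey.name', // accepted on JsonSchema4/JsonSchema7 after augmentation
        title: 'API Key Name',
    };
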
@@ -6,103 +6,60 @@ import { AuthZGuard } from 'nest-authz';
import request from 'supertest';
import { afterAll, beforeAll, describe, expect, it, vi } from 'vitest';

import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
import { store } from '@app/store/index.js';
import { loadStateFiles } from '@app/store/modules/emhttp.js';
import { AppModule } from '@app/unraid-api/app/app.module.js';
import { AuthService } from '@app/unraid-api/auth/auth.service.js';
import { AuthenticationGuard } from '@app/unraid-api/auth/authentication.guard.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';

// Mock external system boundaries that we can't control in tests
vi.mock('dockerode', () => {
    return {
        default: vi.fn().mockImplementation(() => ({
            listContainers: vi.fn().mockResolvedValue([
                {
                    Id: 'test-container-1',
                    Names: ['/test-container'],
                    State: 'running',
                    Status: 'Up 5 minutes',
                    Image: 'test:latest',
                    Command: 'node server.js',
                    Created: Date.now() / 1000,
                    Ports: [
                        {
                            IP: '0.0.0.0',
                            PrivatePort: 3000,
                            PublicPort: 3000,
                            Type: 'tcp',
                        },
                    ],
                    Labels: {},
                    HostConfig: {
                        NetworkMode: 'bridge',
                    },
                    NetworkSettings: {
                        Networks: {},
                    },
                    Mounts: [],
// Mock the store before importing it
vi.mock('@app/store/index.js', () => ({
    store: {
        dispatch: vi.fn().mockResolvedValue(undefined),
        subscribe: vi.fn().mockImplementation(() => vi.fn()),
        getState: vi.fn().mockReturnValue({
            emhttp: {
                var: {
                    csrfToken: 'test-csrf-token',
                },
            ]),
            getContainer: vi.fn().mockImplementation((id) => ({
                inspect: vi.fn().mockResolvedValue({
                    Id: id,
                    Name: '/test-container',
                    State: { Running: true },
                    Config: { Image: 'test:latest' },
                }),
            })),
            listImages: vi.fn().mockResolvedValue([]),
            listNetworks: vi.fn().mockResolvedValue([]),
            listVolumes: vi.fn().mockResolvedValue({ Volumes: [] }),
        })),
    };
});

// Mock external command execution
vi.mock('execa', () => ({
    execa: vi.fn().mockImplementation((cmd) => {
        if (cmd === 'whoami') {
            return Promise.resolve({ stdout: 'testuser' });
        }
        return Promise.resolve({ stdout: 'mocked output' });
    }),
            },
            docker: {
                containers: [],
                autostart: [],
            },
        }),
        unsubscribe: vi.fn(),
    },
    getters: {
        emhttp: vi.fn().mockReturnValue({
            var: {
                csrfToken: 'test-csrf-token',
            },
        }),
        docker: vi.fn().mockReturnValue({
            containers: [],
            autostart: [],
        }),
        paths: vi.fn().mockReturnValue({
            'docker-autostart': '/tmp/docker-autostart',
            'docker-socket': '/var/run/docker.sock',
            'var-run': '/var/run',
            'auth-keys': '/tmp/auth-keys',
            activationBase: '/tmp/activation',
            'dynamix-config': ['/tmp/dynamix-config', '/tmp/dynamix-config'],
            identConfig: '/tmp/ident.cfg',
        }),
        dynamix: vi.fn().mockReturnValue({
            notify: {
                path: '/tmp/notifications',
            },
        }),
    },
    loadDynamixConfig: vi.fn(),
    loadStateFiles: vi.fn().mockResolvedValue(undefined),
}));

// Mock child_process for services that spawn processes
vi.mock('node:child_process', () => ({
    spawn: vi.fn(() => ({
        on: vi.fn(),
        kill: vi.fn(),
        stdout: { on: vi.fn() },
        stderr: { on: vi.fn() },
    })),
}));

// Mock file system operations that would fail in test environment
vi.mock('node:fs/promises', async (importOriginal) => {
    const actual = await importOriginal<typeof import('fs/promises')>();
    return {
        ...actual,
        readFile: vi.fn().mockResolvedValue(''),
        writeFile: vi.fn().mockResolvedValue(undefined),
        mkdir: vi.fn().mockResolvedValue(undefined),
        access: vi.fn().mockResolvedValue(undefined),
        stat: vi.fn().mockResolvedValue({ isFile: () => true }),
        readdir: vi.fn().mockResolvedValue([]),
        rename: vi.fn().mockResolvedValue(undefined),
        unlink: vi.fn().mockResolvedValue(undefined),
    };
});

// Mock fs module for synchronous operations
vi.mock('node:fs', () => ({
    existsSync: vi.fn().mockReturnValue(false),
    readFileSync: vi.fn().mockReturnValue(''),
    writeFileSync: vi.fn(),
    mkdirSync: vi.fn(),
    readdirSync: vi.fn().mockReturnValue([]),
// Mock fs-extra for directory operations
vi.mock('fs-extra', () => ({
    ensureDirSync: vi.fn().mockReturnValue(undefined),
}));

describe('AppModule Integration Tests', () => {
@@ -110,14 +67,6 @@ describe('AppModule Integration Tests', () => {
    let moduleRef: TestingModule;

    beforeAll(async () => {
        // Initialize the dynamix config and state files before creating the module
        await store.dispatch(loadDynamixConfigFile());
        await store.dispatch(loadStateFiles());

        // Debug: Log the CSRF token from the store
        const { getters } = await import('@app/store/index.js');
        console.log('CSRF Token from store:', getters.emhttp().var.csrfToken);

        moduleRef = await Test.createTestingModule({
            imports: [AppModule],
        })
@@ -150,14 +99,6 @@ describe('AppModule Integration Tests', () => {
                roles: ['admin'],
            }),
        })
        // Override Redis client
        .overrideProvider('REDIS_CLIENT')
        .useValue({
            get: vi.fn(),
            set: vi.fn(),
            del: vi.fn(),
            connect: vi.fn(),
        })
        .compile();

    app = moduleRef.createNestApplication<NestFastifyApplication>(new FastifyAdapter());
@@ -178,9 +119,9 @@ describe('AppModule Integration Tests', () => {
    });

    it('should resolve core services', () => {
        const dockerService = moduleRef.get(DockerService);
        const authService = moduleRef.get(AuthService);

        expect(dockerService).toBeDefined();
        expect(authService).toBeDefined();
    });
});

@@ -239,18 +180,12 @@ describe('AppModule Integration Tests', () => {
    });

    describe('Service Integration', () => {
        it('should have working service-to-service communication', async () => {
            const dockerService = moduleRef.get(DockerService);

            // Test that the service can be called and returns expected data structure
            const containers = await dockerService.getContainers();

            expect(containers).toBeInstanceOf(Array);
            // The containers might be empty or cached, just verify structure
            if (containers.length > 0) {
                expect(containers[0]).toHaveProperty('id');
                expect(containers[0]).toHaveProperty('names');
            }
        it('should have working service-to-service communication', () => {
            // Test that the module can resolve its services without errors
            // This validates that dependency injection is working correctly
            const authService = moduleRef.get(AuthService);
            expect(authService).toBeDefined();
            expect(typeof authService.validateCookiesWithCsrfToken).toBe('function');
        });
    });
});

@@ -1,4 +1,5 @@
import { CacheModule } from '@nestjs/cache-manager';
import { ConfigModule } from '@nestjs/config';
import { Test } from '@nestjs/testing';

import { describe, expect, it } from 'vitest';
@@ -10,7 +11,11 @@ describe('Module Dependencies Integration', () => {
        let module;
        try {
            module = await Test.createTestingModule({
                imports: [CacheModule.register({ isGlobal: true }), RestModule],
                imports: [
                    ConfigModule.forRoot({ ignoreEnvFile: true, isGlobal: true }),
                    CacheModule.register({ isGlobal: true }),
                    RestModule,
                ],
            }).compile();

            expect(module).toBeDefined();

@@ -183,6 +183,11 @@ export class ApiKeyService implements OnModuleInit {

    async loadAllFromDisk(): Promise<ApiKey[]> {
        const files = await readdir(this.basePath).catch((error) => {
            if (error.code === 'ENOENT') {
                // Directory doesn't exist, which means no API keys have been created yet
                this.logger.error(`API key directory does not exist: ${this.basePath}`);
                return [];
            }
            this.logger.error(`Failed to read API key directory: ${error}`);
            throw new Error('Failed to list API keys');
        });

@@ -36,6 +36,7 @@ const mockPluginManagementService = {
    addPlugin: vi.fn(),
    addBundledPlugin: vi.fn(),
    removePlugin: vi.fn(),
    removePluginConfigOnly: vi.fn(),
    removeBundledPlugin: vi.fn(),
    plugins: [] as string[],
};
@@ -147,6 +148,7 @@ describe('Plugin Commands', () => {
                '@unraid/plugin-example',
                '@unraid/plugin-test'
            );
            expect(mockPluginManagementService.removePluginConfigOnly).not.toHaveBeenCalled();
            expect(mockLogger.log).toHaveBeenCalledWith('Removed plugin @unraid/plugin-example');
            expect(mockLogger.log).toHaveBeenCalledWith('Removed plugin @unraid/plugin-test');
            expect(mockApiConfigPersistence.persist).toHaveBeenCalled();
@@ -178,9 +180,72 @@ describe('Plugin Commands', () => {
            expect(mockPluginManagementService.removePlugin).toHaveBeenCalledWith(
                '@unraid/plugin-example'
            );
            expect(mockPluginManagementService.removePluginConfigOnly).not.toHaveBeenCalled();
            expect(mockApiConfigPersistence.persist).toHaveBeenCalled();
            expect(mockRestartCommand.run).not.toHaveBeenCalled();
        });

        it('should bypass npm uninstall when bypass flag is provided', async () => {
            mockInquirerService.prompt.mockResolvedValue({
                plugins: ['@unraid/plugin-example'],
                restart: true,
                bypassNpm: true,
            });

            await command.run([], { restart: true, bypassNpm: true });

            expect(mockPluginManagementService.removePluginConfigOnly).toHaveBeenCalledWith(
                '@unraid/plugin-example'
            );
            expect(mockPluginManagementService.removePlugin).not.toHaveBeenCalled();
        });

        it('should preserve cli flags when prompt supplies plugins', async () => {
            mockInquirerService.prompt.mockResolvedValue({
                plugins: ['@unraid/plugin-example'],
            });

            await command.run([], { restart: false, bypassNpm: true });

            expect(mockPluginManagementService.removePluginConfigOnly).toHaveBeenCalledWith(
                '@unraid/plugin-example'
            );
            expect(mockPluginManagementService.removePlugin).not.toHaveBeenCalled();
            expect(mockRestartCommand.run).not.toHaveBeenCalled();
        });

        it('should honor prompt restart value when cli flag not provided', async () => {
            mockInquirerService.prompt.mockResolvedValue({
                plugins: ['@unraid/plugin-example'],
                restart: false,
            });

            await command.run([], {});

            expect(mockPluginManagementService.removePlugin).toHaveBeenCalledWith(
                '@unraid/plugin-example'
            );
            expect(mockRestartCommand.run).not.toHaveBeenCalled();
        });

        it('should respect passed params and skip inquirer', async () => {
            await command.run(['@unraid/plugin-example'], { restart: true, bypassNpm: false });

            expect(mockInquirerService.prompt).not.toHaveBeenCalled();
            expect(mockPluginManagementService.removePlugin).toHaveBeenCalledWith(
                '@unraid/plugin-example'
            );
        });

        it('should bypass npm when flag provided with passed params', async () => {
            await command.run(['@unraid/plugin-example'], { restart: true, bypassNpm: true });

            expect(mockInquirerService.prompt).not.toHaveBeenCalled();
            expect(mockPluginManagementService.removePluginConfigOnly).toHaveBeenCalledWith(
                '@unraid/plugin-example'
            );
            expect(mockPluginManagementService.removePlugin).not.toHaveBeenCalled();
        });
    });

    describe('ListPluginCommand', () => {

@@ -525,6 +525,7 @@ export enum ContainerPortType {

export enum ContainerState {
    EXITED = 'EXITED',
    PAUSED = 'PAUSED',
    RUNNING = 'RUNNING'
}

@@ -559,6 +560,17 @@ export type CpuLoad = {
    percentUser: Scalars['Float']['output'];
};

export type CpuPackages = Node & {
    __typename?: 'CpuPackages';
    id: Scalars['PrefixedID']['output'];
    /** Power draw per package (W) */
    power: Array<Scalars['Float']['output']>;
    /** Temperature per package (°C) */
    temp: Array<Scalars['Float']['output']>;
    /** Total CPU package power draw (W) */
    totalPower: Scalars['Float']['output'];
};

export type CpuUtilization = Node & {
    __typename?: 'CpuUtilization';
    /** CPU load for each core */
@@ -590,6 +602,19 @@ export type Customization = {
    theme: Theme;
};

/** Customization related mutations */
export type CustomizationMutations = {
    __typename?: 'CustomizationMutations';
    /** Update the UI theme (writes dynamix.cfg) */
    setTheme: Theme;
};


/** Customization related mutations */
export type CustomizationMutationsSetThemeArgs = {
    theme: ThemeName;
};

export type DeleteApiKeyInput = {
    ids: Array<Scalars['PrefixedID']['input']>;
};
@@ -678,11 +703,20 @@ export enum DiskSmartStatus {

export type Docker = Node & {
    __typename?: 'Docker';
    container?: Maybe<DockerContainer>;
    containerUpdateStatuses: Array<ExplicitStatusItem>;
    containers: Array<DockerContainer>;
    id: Scalars['PrefixedID']['output'];
    /** Access container logs. Requires specifying a target container id through resolver arguments. */
    logs: DockerContainerLogs;
    networks: Array<DockerNetwork>;
    organizer: ResolvedOrganizerV1;
    portConflicts: DockerPortConflicts;
};


export type DockerContainerArgs = {
    id: Scalars['PrefixedID']['input'];
};


@@ -691,38 +725,169 @@ export type DockerContainersArgs = {
};


export type DockerLogsArgs = {
    id: Scalars['PrefixedID']['input'];
    since?: InputMaybe<Scalars['DateTime']['input']>;
    tail?: InputMaybe<Scalars['Int']['input']>;
};


export type DockerNetworksArgs = {
    skipCache?: Scalars['Boolean']['input'];
};


export type DockerOrganizerArgs = {
    skipCache?: Scalars['Boolean']['input'];
};


export type DockerPortConflictsArgs = {
    skipCache?: Scalars['Boolean']['input'];
};

export type DockerAutostartEntryInput = {
    /** Whether the container should auto-start */
    autoStart: Scalars['Boolean']['input'];
    /** Docker container identifier */
    id: Scalars['PrefixedID']['input'];
    /** Number of seconds to wait after starting the container */
    wait?: InputMaybe<Scalars['Int']['input']>;
};

export type DockerContainer = Node & {
    __typename?: 'DockerContainer';
    autoStart: Scalars['Boolean']['output'];
    /** Zero-based order in the auto-start list */
    autoStartOrder?: Maybe<Scalars['Int']['output']>;
    /** Wait time in seconds applied after start */
    autoStartWait?: Maybe<Scalars['Int']['output']>;
    command: Scalars['String']['output'];
    created: Scalars['Int']['output'];
    hostConfig?: Maybe<ContainerHostConfig>;
    /** Icon URL */
    iconUrl?: Maybe<Scalars['String']['output']>;
    id: Scalars['PrefixedID']['output'];
    image: Scalars['String']['output'];
    imageId: Scalars['String']['output'];
    /** Whether the container is orphaned (no template found) */
    isOrphaned: Scalars['Boolean']['output'];
    isRebuildReady?: Maybe<Scalars['Boolean']['output']>;
    isUpdateAvailable?: Maybe<Scalars['Boolean']['output']>;
    labels?: Maybe<Scalars['JSON']['output']>;
    /** List of LAN-accessible host:port values */
    lanIpPorts?: Maybe<Array<Scalars['String']['output']>>;
    mounts?: Maybe<Array<Scalars['JSON']['output']>>;
    names: Array<Scalars['String']['output']>;
    networkSettings?: Maybe<Scalars['JSON']['output']>;
    ports: Array<ContainerPort>;
    /** Project/Product homepage URL */
    projectUrl?: Maybe<Scalars['String']['output']>;
    /** Registry/Docker Hub URL */
    registryUrl?: Maybe<Scalars['String']['output']>;
    /** Shell to use for console access (from template) */
    shell?: Maybe<Scalars['String']['output']>;
    /** Size of container logs (in bytes) */
    sizeLog?: Maybe<Scalars['BigInt']['output']>;
    /** Total size of all files in the container (in bytes) */
    sizeRootFs?: Maybe<Scalars['BigInt']['output']>;
    /** Size of writable layer (in bytes) */
    sizeRw?: Maybe<Scalars['BigInt']['output']>;
    state: ContainerState;
    status: Scalars['String']['output'];
    /** Support page/thread URL */
    supportUrl?: Maybe<Scalars['String']['output']>;
    /** Whether Tailscale is enabled for this container */
    tailscaleEnabled: Scalars['Boolean']['output'];
    /** Tailscale status for this container (fetched via docker exec) */
    tailscaleStatus?: Maybe<TailscaleStatus>;
    templatePath?: Maybe<Scalars['String']['output']>;
    /** Port mappings from template (used when container is not running) */
    templatePorts?: Maybe<Array<ContainerPort>>;
    /** Resolved WebUI URL from template */
    webUiUrl?: Maybe<Scalars['String']['output']>;
};


export type DockerContainerTailscaleStatusArgs = {
    forceRefresh?: InputMaybe<Scalars['Boolean']['input']>;
};

export type DockerContainerLogLine = {
    __typename?: 'DockerContainerLogLine';
    message: Scalars['String']['output'];
    timestamp: Scalars['DateTime']['output'];
};

export type DockerContainerLogs = {
    __typename?: 'DockerContainerLogs';
    containerId: Scalars['PrefixedID']['output'];
    /** Cursor that can be passed back through the since argument to continue streaming logs. */
    cursor?: Maybe<Scalars['DateTime']['output']>;
    lines: Array<DockerContainerLogLine>;
};

export type DockerContainerPortConflict = {
    __typename?: 'DockerContainerPortConflict';
    containers: Array<DockerPortConflictContainer>;
    privatePort: Scalars['Port']['output'];
    type: ContainerPortType;
};

export type DockerContainerStats = {
    __typename?: 'DockerContainerStats';
    /** Block I/O String (e.g. 100MB / 1GB) */
    blockIO: Scalars['String']['output'];
    /** CPU Usage Percentage */
    cpuPercent: Scalars['Float']['output'];
    id: Scalars['PrefixedID']['output'];
    /** Memory Usage Percentage */
    memPercent: Scalars['Float']['output'];
    /** Memory Usage String (e.g. 100MB / 1GB) */
    memUsage: Scalars['String']['output'];
    /** Network I/O String (e.g. 100MB / 1GB) */
    netIO: Scalars['String']['output'];
};

export type DockerLanPortConflict = {
    __typename?: 'DockerLanPortConflict';
    containers: Array<DockerPortConflictContainer>;
    lanIpPort: Scalars['String']['output'];
    publicPort?: Maybe<Scalars['Port']['output']>;
    type: ContainerPortType;
};

export type DockerMutations = {
    __typename?: 'DockerMutations';
    /** Pause (Suspend) a container */
    pause: DockerContainer;
    /** Remove a container */
    removeContainer: Scalars['Boolean']['output'];
    /** Start a container */
    start: DockerContainer;
    /** Stop a container */
    stop: DockerContainer;
    /** Unpause (Resume) a container */
    unpause: DockerContainer;
    /** Update all containers that have available updates */
    updateAllContainers: Array<DockerContainer>;
    /** Update auto-start configuration for Docker containers */
    updateAutostartConfiguration: Scalars['Boolean']['output'];
    /** Update a container to the latest image */
    updateContainer: DockerContainer;
    /** Update multiple containers to the latest images */
    updateContainers: Array<DockerContainer>;
};


export type DockerMutationsPauseArgs = {
    id: Scalars['PrefixedID']['input'];
};


export type DockerMutationsRemoveContainerArgs = {
    id: Scalars['PrefixedID']['input'];
    withImage?: InputMaybe<Scalars['Boolean']['input']>;
};


@@ -735,6 +900,27 @@ export type DockerMutationsStopArgs = {
    id: Scalars['PrefixedID']['input'];
};


export type DockerMutationsUnpauseArgs = {
    id: Scalars['PrefixedID']['input'];
};


export type DockerMutationsUpdateAutostartConfigurationArgs = {
    entries: Array<DockerAutostartEntryInput>;
    persistUserPreferences?: InputMaybe<Scalars['Boolean']['input']>;
};


export type DockerMutationsUpdateContainerArgs = {
    id: Scalars['PrefixedID']['input'];
};


export type DockerMutationsUpdateContainersArgs = {
    ids: Array<Scalars['PrefixedID']['input']>;
};

export type DockerNetwork = Node & {
    __typename?: 'DockerNetwork';
    attachable: Scalars['Boolean']['output'];
@@ -754,6 +940,26 @@ export type DockerNetwork = Node & {
    scope: Scalars['String']['output'];
};

export type DockerPortConflictContainer = {
    __typename?: 'DockerPortConflictContainer';
    id: Scalars['PrefixedID']['output'];
    name: Scalars['String']['output'];
};

export type DockerPortConflicts = {
    __typename?: 'DockerPortConflicts';
    containerPorts: Array<DockerContainerPortConflict>;
    lanPorts: Array<DockerLanPortConflict>;
};

export type DockerTemplateSyncResult = {
    __typename?: 'DockerTemplateSyncResult';
    errors: Array<Scalars['String']['output']>;
    matched: Scalars['Int']['output'];
    scanned: Scalars['Int']['output'];
    skipped: Scalars['Int']['output'];
};

export type DynamicRemoteAccessStatus = {
    __typename?: 'DynamicRemoteAccessStatus';
    /** The type of dynamic remote access that is enabled */
@@ -799,6 +1005,20 @@ export type FlashBackupStatus = {
    status: Scalars['String']['output'];
};

export type FlatOrganizerEntry = {
    __typename?: 'FlatOrganizerEntry';
    childrenIds: Array<Scalars['String']['output']>;
    depth: Scalars['Float']['output'];
    hasChildren: Scalars['Boolean']['output'];
    id: Scalars['String']['output'];
    meta?: Maybe<DockerContainer>;
    name: Scalars['String']['output'];
    parentId?: Maybe<Scalars['String']['output']>;
    path: Array<Scalars['String']['output']>;
    position: Scalars['Float']['output'];
    type: Scalars['String']['output'];
};

export type FormSchema = {
    /** The data schema for the form */
    dataSchema: Scalars['JSON']['output'];
@@ -869,6 +1089,7 @@ export type InfoCpu = Node & {
    manufacturer?: Maybe<Scalars['String']['output']>;
    /** CPU model */
    model?: Maybe<Scalars['String']['output']>;
    packages: CpuPackages;
    /** Number of physical processors */
    processors?: Maybe<Scalars['Int']['output']>;
    /** CPU revision */
@@ -885,6 +1106,8 @@ export type InfoCpu = Node & {
    stepping?: Maybe<Scalars['Int']['output']>;
    /** Number of CPU threads */
    threads?: Maybe<Scalars['Int']['output']>;
    /** Per-package array of core/thread pairs, e.g. [[[0,1],[2,3]], [[4,5],[6,7]]] */
    topology: Array<Array<Array<Scalars['Int']['output']>>>;
    /** CPU vendor */
    vendor?: Maybe<Scalars['String']['output']>;
    /** CPU voltage */
@@ -1223,8 +1446,10 @@ export type Mutation = {
    connectSignIn: Scalars['Boolean']['output'];
    connectSignOut: Scalars['Boolean']['output'];
    createDockerFolder: ResolvedOrganizerV1;
    createDockerFolderWithItems: ResolvedOrganizerV1;
    /** Creates a new notification record */
    createNotification: Notification;
    customization: CustomizationMutations;
    /** Deletes all archived notifications on server. */
    deleteArchivedNotifications: NotificationOverview;
    deleteDockerEntries: ResolvedOrganizerV1;
@@ -1234,6 +1459,9 @@ export type Mutation = {
    /** Initiates a flash drive backup using a configured remote. */
    initiateFlashBackup: FlashBackupStatus;
    moveDockerEntriesToFolder: ResolvedOrganizerV1;
    moveDockerItemsToPosition: ResolvedOrganizerV1;
    /** Creates a notification if an equivalent unread notification does not already exist. */
    notifyIfUnique?: Maybe<Notification>;
    parityCheck: ParityCheckMutations;
    rclone: RCloneMutations;
    /** Reads each notification to recompute & update the overview. */
@@ -1241,13 +1469,18 @@ export type Mutation = {
    refreshDockerDigests: Scalars['Boolean']['output'];
    /** Remove one or more plugins from the API. Returns false if restart was triggered automatically, true if manual restart is required. */
    removePlugin: Scalars['Boolean']['output'];
    renameDockerFolder: ResolvedOrganizerV1;
    /** Reset Docker template mappings to defaults. Use this to recover from corrupted state. */
    resetDockerTemplateMappings: Scalars['Boolean']['output'];
    setDockerFolderChildren: ResolvedOrganizerV1;
    setupRemoteAccess: Scalars['Boolean']['output'];
    syncDockerTemplatePaths: DockerTemplateSyncResult;
    unarchiveAll: NotificationOverview;
    unarchiveNotifications: NotificationOverview;
    /** Marks a notification as unread. */
    unreadNotification: Notification;
    updateApiSettings: ConnectSettingsValues;
    updateDockerViewPreferences: ResolvedOrganizerV1;
    updateSettings: UpdateSettingsResponse;
    vm: VmMutations;
};
@@ -1290,6 +1523,14 @@ export type MutationCreateDockerFolderArgs = {
};


export type MutationCreateDockerFolderWithItemsArgs = {
    name: Scalars['String']['input'];
    parentId?: InputMaybe<Scalars['String']['input']>;
    position?: InputMaybe<Scalars['Float']['input']>;
    sourceEntryIds?: InputMaybe<Array<Scalars['String']['input']>>;
};


export type MutationCreateNotificationArgs = {
    input: NotificationData;
};
@@ -1322,11 +1563,29 @@ export type MutationMoveDockerEntriesToFolderArgs = {
};


export type MutationMoveDockerItemsToPositionArgs = {
    destinationFolderId: Scalars['String']['input'];
    position: Scalars['Float']['input'];
    sourceEntryIds: Array<Scalars['String']['input']>;
};


export type MutationNotifyIfUniqueArgs = {
    input: NotificationData;
};


export type MutationRemovePluginArgs = {
    input: PluginManagementInput;
};


export type MutationRenameDockerFolderArgs = {
    folderId: Scalars['String']['input'];
    newName: Scalars['String']['input'];
};


export type MutationSetDockerFolderChildrenArgs = {
    childrenIds: Array<Scalars['String']['input']>;
    folderId?: InputMaybe<Scalars['String']['input']>;
@@ -1358,6 +1617,12 @@ export type MutationUpdateApiSettingsArgs = {
};


export type MutationUpdateDockerViewPreferencesArgs = {
    prefs: Scalars['JSON']['input'];
    viewId?: InputMaybe<Scalars['String']['input']>;
};


export type MutationUpdateSettingsArgs = {
    input: Scalars['JSON']['input'];
};
@@ -1422,6 +1687,14 @@ export type NotificationOverview = {
    unread: NotificationCounts;
};

export type NotificationSettings = {
    __typename?: 'NotificationSettings';
    duration: Scalars['Int']['output'];
    expand: Scalars['Boolean']['output'];
    max: Scalars['Int']['output'];
    position: Scalars['String']['output'];
};

export enum NotificationType {
    ARCHIVE = 'ARCHIVE',
    UNREAD = 'UNREAD'
@@ -1433,6 +1706,9 @@ export type Notifications = Node & {
    list: Array<Notification>;
    /** A cached overview of the notifications in the system & their severity. */
    overview: NotificationOverview;
    settings: NotificationSettings;
    /** Deduplicated list of unread warning and alert notifications, sorted latest first. */
    warningsAndAlerts: Array<Notification>;
};


@@ -1498,22 +1774,6 @@ export type OidcSessionValidation = {
    valid: Scalars['Boolean']['output'];
};

export type OrganizerContainerResource = {
    __typename?: 'OrganizerContainerResource';
    id: Scalars['String']['output'];
    meta?: Maybe<DockerContainer>;
    name: Scalars['String']['output'];
    type: Scalars['String']['output'];
};

export type OrganizerResource = {
    __typename?: 'OrganizerResource';
    id: Scalars['String']['output'];
    meta?: Maybe<Scalars['JSON']['output']>;
    name: Scalars['String']['output'];
    type: Scalars['String']['output'];
};

export type Owner = {
    __typename?: 'Owner';
    avatar: Scalars['String']['output'];
@@ -1882,16 +2142,6 @@ export type RemoveRoleFromApiKeyInput = {
    role: Role;
};

export type ResolvedOrganizerEntry = OrganizerContainerResource | OrganizerResource | ResolvedOrganizerFolder;

export type ResolvedOrganizerFolder = {
    __typename?: 'ResolvedOrganizerFolder';
    children: Array<ResolvedOrganizerEntry>;
    id: Scalars['String']['output'];
    name: Scalars['String']['output'];
    type: Scalars['String']['output'];
};

export type ResolvedOrganizerV1 = {
    __typename?: 'ResolvedOrganizerV1';
    version: Scalars['Float']['output'];
@@ -1900,10 +2150,11 @@ export type ResolvedOrganizerV1 = {

export type ResolvedOrganizerView = {
    __typename?: 'ResolvedOrganizerView';
    flatEntries: Array<FlatOrganizerEntry>;
    id: Scalars['String']['output'];
    name: Scalars['String']['output'];
    prefs?: Maybe<Scalars['JSON']['output']>;
    root: ResolvedOrganizerEntry;
    rootId: Scalars['String']['output'];
};

/** Available resources for permissions */
@@ -2046,13 +2297,16 @@ export type SsoSettings = Node & {
export type Subscription = {
    __typename?: 'Subscription';
    arraySubscription: UnraidArray;
    dockerContainerStats: DockerContainerStats;
    logFile: LogFileContent;
    notificationAdded: Notification;
    notificationsOverview: NotificationOverview;
    notificationsWarningsAndAlerts: Array<Notification>;
    ownerSubscription: Owner;
    parityHistorySubscription: ParityCheck;
    serversSubscription: Server;
    systemMetricsCpu: CpuUtilization;
    systemMetricsCpuTelemetry: CpuPackages;
    systemMetricsMemory: MemoryUtilization;
    upsUpdates: UpsDevice;
};
@@ -2062,6 +2316,56 @@ export type SubscriptionLogFileArgs = {
    path: Scalars['String']['input'];
};

/** Tailscale exit node connection status */
export type TailscaleExitNodeStatus = {
    __typename?: 'TailscaleExitNodeStatus';
    /** Whether the exit node is online */
    online: Scalars['Boolean']['output'];
    /** Tailscale IPs of the exit node */
    tailscaleIps?: Maybe<Array<Scalars['String']['output']>>;
};

/** Tailscale status for a Docker container */
export type TailscaleStatus = {
    __typename?: 'TailscaleStatus';
    /** Authentication URL if Tailscale needs login */
    authUrl?: Maybe<Scalars['String']['output']>;
    /** Tailscale backend state (Running, NeedsLogin, Stopped, etc.) */
    backendState?: Maybe<Scalars['String']['output']>;
    /** Actual Tailscale DNS name */
    dnsName?: Maybe<Scalars['String']['output']>;
    /** Status of the connected exit node (if using one) */
    exitNodeStatus?: Maybe<TailscaleExitNodeStatus>;
    /** Configured Tailscale hostname */
    hostname?: Maybe<Scalars['String']['output']>;
    /** Whether this container is an exit node */
    isExitNode: Scalars['Boolean']['output'];
    /** Whether the Tailscale key has expired */
    keyExpired: Scalars['Boolean']['output'];
    /** Tailscale key expiry date */
    keyExpiry?: Maybe<Scalars['DateTime']['output']>;
    /** Days until key expires */
    keyExpiryDays?: Maybe<Scalars['Int']['output']>;
    /** Latest available Tailscale version */
    latestVersion?: Maybe<Scalars['String']['output']>;
    /** Whether Tailscale is online in the container */
    online: Scalars['Boolean']['output'];
    /** Advertised subnet routes */
    primaryRoutes?: Maybe<Array<Scalars['String']['output']>>;
    /** DERP relay code */
    relay?: Maybe<Scalars['String']['output']>;
    /** DERP relay region name */
    relayName?: Maybe<Scalars['String']['output']>;
    /** Tailscale IPv4 and IPv6 addresses */
    tailscaleIps?: Maybe<Array<Scalars['String']['output']>>;
    /** Whether a Tailscale update is available */
    updateAvailable: Scalars['Boolean']['output'];
    /** Current Tailscale version */
    version?: Maybe<Scalars['String']['output']>;
    /** Tailscale Serve/Funnel WebUI URL */
    webUiUrl?: Maybe<Scalars['String']['output']>;
};

/** Temperature unit */
export enum Temperature {
    CELSIUS = 'CELSIUS',

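A minimal sketch of a payload for the new updateAutostartConfiguration mutation, built from the generated DockerAutostartEntryInput above (the container ids are hypothetical, and the PrefixedID scalar is assumed to map to string on the client):

    const entries = [
        { id: 'container-one', autoStart: true, wait: 30 }, // wait 30 s after starting
        { id: 'container-two', autoStart: false },
    ];
    // pass as { entries, persistUserPreferences: true } variables to the mutation
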
@@ -74,13 +74,15 @@ export class InstallPluginCommand extends CommandRunner {

interface RemovePluginCommandOptions {
    plugins?: string[];
    restart: boolean;
    restart?: boolean;
    bypassNpm?: boolean;
}

@SubCommand({
    name: 'remove',
    aliases: ['rm'],
    description: 'Remove plugin peer dependencies.',
    arguments: '[plugins...]',
})
export class RemovePluginCommand extends CommandRunner {
    constructor(
@@ -93,9 +95,83 @@ export class RemovePluginCommand extends CommandRunner {
        super();
    }

    async run(_passedParams: string[], options?: RemovePluginCommandOptions): Promise<void> {
    async run(passedParams: string[], options?: RemovePluginCommandOptions): Promise<void> {
        const cliBypass = options?.bypassNpm;
        const cliRestart = options?.restart;
        const mergedOptions: RemovePluginCommandOptions = {
            bypassNpm: cliBypass ?? false,
            restart: cliRestart ?? true,
            plugins: passedParams.length > 0 ? passedParams : options?.plugins,
        };

        let resolvedOptions = mergedOptions;
        if (!mergedOptions.plugins?.length) {
            const promptOptions = await this.promptForPlugins(mergedOptions);
            if (!promptOptions) {
                return;
            }
            resolvedOptions = {
                // precedence: cli > prompt > default (fallback)
                bypassNpm: cliBypass ?? promptOptions.bypassNpm ?? mergedOptions.bypassNpm,
                restart: cliRestart ?? promptOptions.restart ?? mergedOptions.restart,
                // precedence: prompt > default (fallback)
                plugins: promptOptions.plugins ?? mergedOptions.plugins,
            };
        }

        if (!resolvedOptions.plugins?.length) {
            this.logService.warn('No plugins selected for removal.');
            return;
        }

        if (resolvedOptions.bypassNpm) {
            await this.pluginManagementService.removePluginConfigOnly(...resolvedOptions.plugins);
        } else {
            await this.pluginManagementService.removePlugin(...resolvedOptions.plugins);
        }
        for (const plugin of resolvedOptions.plugins) {
            this.logService.log(`Removed plugin ${plugin}`);
        }
        await this.apiConfigPersistence.persist();

        if (resolvedOptions.restart) {
            await this.restartCommand.run();
        }
    }

    @Option({
        flags: '--no-restart',
        description: 'do NOT restart the service after deploy',
        defaultValue: true,
    })
    parseRestart(): boolean {
        return false;
    }

    @Option({
        flags: '-b, --bypass-npm',
        description: 'Bypass npm uninstall and only update the config',
        defaultValue: false,
        name: 'bypassNpm',
    })
    parseBypass(): boolean {
        return true;
    }

    @Option({
        flags: '--npm',
        description: 'Run npm uninstall for unbundled plugins (default behavior)',
        name: 'bypassNpm',
    })
    parseRunNpm(): boolean {
        return false;
    }

    private async promptForPlugins(
        initialOptions: RemovePluginCommandOptions
    ): Promise<RemovePluginCommandOptions | undefined> {
        try {
            options = await this.inquirerService.prompt(RemovePluginQuestionSet.name, options);
            return await this.inquirerService.prompt(RemovePluginQuestionSet.name, initialOptions);
        } catch (error) {
            if (error instanceof NoPluginsFoundError) {
                this.logService.error(error.message);
@@ -108,30 +184,6 @@ export class RemovePluginCommand extends CommandRunner {
                process.exit(1);
            }
        }

        if (!options.plugins || options.plugins.length === 0) {
            this.logService.warn('No plugins selected for removal.');
            return;
        }

        await this.pluginManagementService.removePlugin(...options.plugins);
        for (const plugin of options.plugins) {
            this.logService.log(`Removed plugin ${plugin}`);
        }
        await this.apiConfigPersistence.persist();

        if (options.restart) {
            await this.restartCommand.run();
        }
    }

    @Option({
        flags: '--no-restart',
        description: 'do NOT restart the service after deploy',
        defaultValue: true,
    })
    parseRestart(): boolean {
        return false;
    }
}


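The run() rewrite above resolves each flag with a cli > prompt > default chain. A self-contained sketch of the same precedence rule (hypothetical helper, not from the diff):

    const resolve = (cli?: boolean, prompt?: boolean, fallback = false) => cli ?? prompt ?? fallback;
    resolve(undefined, false, true);  // prompt wins when no cli flag was given -> false
    resolve(true, false, false);      // an explicit cli flag always wins -> true
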
@@ -58,7 +58,8 @@ export class PM2Service {
            ...(needsPathUpdate && { PATH: finalPath }),
        };

        const runCommand = () => execa(PM2_PATH, [...args], execOptions satisfies Options);
        const pm2Args = args.some((arg) => arg === '--no-color') ? args : ['--no-color', ...args];
        const runCommand = () => execa(PM2_PATH, pm2Args, execOptions satisfies Options);
        if (raw) {
            return runCommand();
        }

@@ -6,6 +6,7 @@ import type { ApiConfig } from '@unraid/shared/services/api-config.js';
import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';
import { csvStringToArray } from '@unraid/shared/util/data.js';

import { isConnectPluginInstalled } from '@app/connect-plugin-cleanup.js';
import { API_VERSION, PATHS_CONFIG_MODULES } from '@app/environment.js';

export { type ApiConfig };
@@ -29,6 +30,13 @@ export const loadApiConfig = async () => {
    const apiHandler = new ApiConfigPersistence(new ConfigService()).getFileHandler();

    const diskConfig: Partial<ApiConfig> = await apiHandler.loadConfig();
    // Hack: cleanup stale connect plugin entry if necessary
    if (!isConnectPluginInstalled()) {
        diskConfig.plugins = diskConfig.plugins?.filter(
            (plugin) => plugin !== 'unraid-api-plugin-connect'
        );
        await apiHandler.writeConfigFile(diskConfig as ApiConfig);
    }

    return {
        ...defaultConfig,

@@ -12,6 +12,24 @@ import {
    createSimpleLabeledControl,
} from '@app/unraid-api/graph/utils/form-utils.js';

const API_KEY_I18N = {
    name: 'jsonforms.apiKey.name',
    description: 'jsonforms.apiKey.description',
    roles: 'jsonforms.apiKey.roles',
    permissionPresets: 'jsonforms.apiKey.permissionPresets',
    customPermissions: {
        root: 'jsonforms.apiKey.customPermissions',
        resources: 'jsonforms.apiKey.customPermissions.resources',
        actions: 'jsonforms.apiKey.customPermissions.actions',
    },
    permissions: {
        header: 'jsonforms.apiKey.permissions.header',
        description: 'jsonforms.apiKey.permissions.description',
        subheader: 'jsonforms.apiKey.permissions.subheader',
        help: 'jsonforms.apiKey.permissions.help',
    },
} as const;

// Helper to get GraphQL enum names for JSON Schema
// GraphQL expects the enum names (keys) not the values
function getAuthActionEnumNames(): string[] {
@@ -82,6 +100,7 @@ export class ApiKeyFormService {
            properties: {
                name: {
                    type: 'string',
                    i18n: API_KEY_I18N.name,
                    title: 'API Key Name',
                    description: 'A descriptive name for this API key',
                    minLength: 1,
@@ -89,12 +108,14 @@ export class ApiKeyFormService {
                },
                description: {
                    type: 'string',
                    i18n: API_KEY_I18N.description,
                    title: 'Description',
                    description: 'Optional description of what this key is used for',
                    maxLength: 500,
                },
                roles: {
                    type: 'array',
                    i18n: API_KEY_I18N.roles,
                    title: 'Roles',
                    description: 'Select one or more roles to grant pre-defined permission sets',
                    items: {
@@ -105,6 +126,7 @@ export class ApiKeyFormService {
                },
                permissionPresets: {
                    type: 'string',
                    i18n: API_KEY_I18N.permissionPresets,
                    title: 'Add Permission Preset',
                    description: 'Quick add common permission sets',
                    enum: [
@@ -119,6 +141,7 @@ export class ApiKeyFormService {
                },
                customPermissions: {
                    type: 'array',
                    i18n: API_KEY_I18N.customPermissions.root,
                    title: 'Permissions',
                    description: 'Configure specific permissions',
                    items: {
@@ -126,6 +149,7 @@ export class ApiKeyFormService {
                        properties: {
                            resources: {
                                type: 'array',
                                i18n: API_KEY_I18N.customPermissions.resources,
                                title: 'Resources',
                                items: {
                                    type: 'string',
@@ -137,6 +161,7 @@ export class ApiKeyFormService {
                            },
                            actions: {
                                type: 'array',
                                i18n: API_KEY_I18N.customPermissions.actions,
                                title: 'Actions',
                                items: {
                                    type: 'string',
@@ -167,6 +192,7 @@ export class ApiKeyFormService {
                controlOptions: {
                    inputType: 'text',
                },
                i18nKey: API_KEY_I18N.name,
            }),
            createLabeledControl({
                scope: '#/properties/description',
@@ -177,6 +203,7 @@ export class ApiKeyFormService {
                    multi: true,
                    rows: 3,
                },
                i18nKey: API_KEY_I18N.description,
            }),
            // Permissions section header
            {
@@ -185,6 +212,7 @@ export class ApiKeyFormService {
                options: {
                    format: 'title',
                },
                i18n: API_KEY_I18N.permissions.header,
            } as LabelElement,
            {
                type: 'Label',
@@ -192,6 +220,7 @@ export class ApiKeyFormService {
                options: {
                    format: 'description',
                },
                i18n: API_KEY_I18N.permissions.description,
            } as LabelElement,
            // Roles selection
            createLabeledControl({
@@ -210,6 +239,7 @@ export class ApiKeyFormService {
                    ),
                    descriptions: this.getRoleDescriptions(),
                },
                i18nKey: API_KEY_I18N.roles,
            }),
            // Separator for permissions
            {
@@ -218,6 +248,7 @@ export class ApiKeyFormService {
                options: {
                    format: 'subtitle',
                },
                i18n: API_KEY_I18N.permissions.subheader,
            } as LabelElement,
            {
                type: 'Label',
@@ -225,6 +256,7 @@ export class ApiKeyFormService {
                options: {
                    format: 'description',
                },
                i18n: API_KEY_I18N.permissions.help,
            } as LabelElement,
            // Permission preset dropdown
            createLabeledControl({
@@ -242,6 +274,7 @@ export class ApiKeyFormService {
                        network_admin: 'Network Admin (Network & Services Control)',
                    },
                },
                i18nKey: API_KEY_I18N.permissionPresets,
            }),
            // Custom permissions array - following OIDC pattern exactly
            {
@@ -269,6 +302,7 @@ export class ApiKeyFormService {
                    {}
                ),
                },
                i18nKey: API_KEY_I18N.customPermissions.resources,
            }),
            createSimpleLabeledControl({
                scope: '#/properties/actions',
@@ -278,6 +312,7 @@ export class ApiKeyFormService {
                    multiple: true,
                    labels: getAuthActionLabels(),
                },
                i18nKey: API_KEY_I18N.customPermissions.actions,
            }),
        ],
    },

@@ -1,9 +1,11 @@
import { Module } from '@nestjs/common';

import { CustomizationMutationsResolver } from '@app/unraid-api/graph/resolvers/customization/customization.mutations.resolver.js';
import { CustomizationResolver } from '@app/unraid-api/graph/resolvers/customization/customization.resolver.js';
import { CustomizationService } from '@app/unraid-api/graph/resolvers/customization/customization.service.js';

@Module({
    providers: [CustomizationService, CustomizationResolver],
    providers: [CustomizationService, CustomizationResolver, CustomizationMutationsResolver],
    exports: [CustomizationService],
})
export class CustomizationModule {}

Some files were not shown because too many files have changed in this diff.