Mirror of https://github.com/unraid/api.git, synced 2026-01-02 14:40:01 -06:00

Compare commits: 57 commits, `4.25.1-bui…` ... `fix/style-…`
| Author | SHA1 | Date |
|---|---|---|
|  | f184711ff7 |  |
|  | 74df938e45 |  |
|  | 51f025b105 |  |
|  | 23a71207dd |  |
|  | 832e9d04f2 |  |
|  | 31af99e52f |  |
|  | 933cefa020 |  |
|  | 375dcd0598 |  |
|  | 64875edbba |  |
|  | 330e81a484 |  |
|  | b8f0fdf8d2 |  |
|  | 36c104915e |  |
|  | dc9a036c73 |  |
|  | c71b0487ad |  |
|  | e7340431a5 |  |
|  | e4a9b8291b |  |
|  | 6b6b78fa2e |  |
|  | e2fdf6cadb |  |
|  | 3d4f193fa4 |  |
|  | b28ef1ea33 |  |
|  | ee0f240233 |  |
|  | 3aacaa1fb5 |  |
|  | 0cd4c0ae16 |  |
|  | 66625ded6a |  |
|  | f8a6785e9c |  |
|  | d7aca81c60 |  |
|  | 854b403fbd |  |
|  | c264a1843c |  |
|  | 45cda4af80 |  |
|  | 64eb9ce9b5 |  |
|  | d56797c59f |  |
|  | 92af3b6115 |  |
|  | 35f8bc2258 |  |
|  | c4cd0c6352 |  |
|  | 818e7ce997 |  |
|  | 7e13202aa1 |  |
|  | d18eaf2364 |  |
|  | 42406e795d |  |
|  | 11d2de5d08 |  |
|  | 031c1ab5dc |  |
|  | 34075e44c5 |  |
|  | ff2906e52a |  |
|  | a0d6cc92c8 |  |
|  | 57acfaacf0 |  |
|  | ea816c7a5c |  |
|  | cafde72d38 |  |
|  | 2b481c397c |  |
|  | 8c4e9dd7ae |  |
|  | f212dce88b |  |
|  | 8cd2a4c124 |  |
|  | 10f048ee1f |  |
|  | e9e271ade5 |  |
|  | 31c41027fc |  |
|  | fabe6a2c4b |  |
|  | 754966d5d3 |  |
|  | ed594e9147 |  |
|  | 50d83313a1 |  |
@@ -241,4 +241,3 @@ const pinia = createTestingPinia({
- Set initial state for focused testing
- Test computed properties by accessing them directly
- Verify state changes by updating the store
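The bullets above describe the store-testing pattern; a minimal sketch of what that looks like with `@pinia/testing` and Vitest (the store module and its state are hypothetical, for illustration only):

```ts
import { expect, test } from 'vitest';
import { createTestingPinia } from '@pinia/testing';
import { setActivePinia } from 'pinia';

// Hypothetical store for illustration; any Pinia store is tested the same way.
import { useThemeStore } from '~/store/theme';

test('theme store', () => {
  // Set initial state for focused testing.
  const pinia = createTestingPinia({
    initialState: { theme: { name: 'white' } },
    stubActions: false,
  });
  setActivePinia(pinia);

  const store = useThemeStore();
  // Test computed properties by accessing them directly.
  expect(store.isDark).toBe(false);
  // Verify state changes by updating the store.
  store.name = 'black';
  expect(store.isDark).toBe(true);
});
```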
201 .github/workflows/build-artifacts.yml vendored Normal file
@@ -0,0 +1,201 @@
name: Build Artifacts

on:
  workflow_call:
    inputs:
      ref:
        type: string
        required: false
        description: "Git ref to checkout (commit SHA, branch, or tag)"
      version_override:
        type: string
        required: false
        description: "Override version (for manual releases)"
    outputs:
      build_number:
        description: "Build number for the artifacts"
        value: ${{ jobs.build-api.outputs.build_number }}
    secrets:
      VITE_ACCOUNT:
        required: true
      VITE_CONNECT:
        required: true
      VITE_UNRAID_NET:
        required: true
      VITE_CALLBACK_KEY:
        required: true
      UNRAID_BOT_GITHUB_ADMIN_TOKEN:
        required: false

jobs:
  build-api:
    name: Build API
    runs-on: ubuntu-latest
    outputs:
      build_number: ${{ steps.buildnumber.outputs.build_number }}
    defaults:
      run:
        working-directory: api
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5
        with:
          ref: ${{ inputs.ref || github.ref }}
          fetch-depth: 0

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Install Node
        uses: actions/setup-node@v5
        with:
          node-version-file: ".nvmrc"
          cache: 'pnpm'

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0

      - name: PNPM Install
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile

      - name: Get Git Short Sha and API version
        id: vars
        run: |
          GIT_SHA=$(git rev-parse --short HEAD)
          IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
          PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
          API_VERSION=${{ inputs.version_override && format('"{0}"', inputs.version_override) || '${PACKAGE_LOCK_VERSION}' }}
          if [ -z "${{ inputs.version_override }}" ] && [ -z "$IS_TAGGED" ]; then
            API_VERSION="${PACKAGE_LOCK_VERSION}+${GIT_SHA}"
          fi
          export API_VERSION
          echo "API_VERSION=${API_VERSION}" >> $GITHUB_ENV
          echo "PACKAGE_LOCK_VERSION=${PACKAGE_LOCK_VERSION}" >> $GITHUB_OUTPUT

      - name: Generate build number
        id: buildnumber
        uses: onyxmueller/build-tag-number@v1
        with:
          token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN || github.token }}
          prefix: ${{ inputs.version_override || steps.vars.outputs.PACKAGE_LOCK_VERSION }}

      - name: Build
        run: |
          pnpm run build:release
          tar -czf deploy/unraid-api.tgz -C deploy/pack/ .

      - name: Upload tgz to Github artifacts
        uses: actions/upload-artifact@v4
        with:
          name: unraid-api
          path: ${{ github.workspace }}/api/deploy/unraid-api.tgz

  build-unraid-ui-webcomponents:
    name: Build Unraid UI Library (Webcomponent Version)
    defaults:
      run:
        working-directory: unraid-ui
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5
        with:
          ref: ${{ inputs.ref || github.ref }}

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Install Node
        uses: actions/setup-node@v5
        with:
          node-version-file: ".nvmrc"
          cache: 'pnpm'

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0

      - name: Install dependencies
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile --filter @unraid/ui

      - name: Lint
        run: pnpm run lint

      - name: Build
        run: pnpm run build:wc

      - name: Upload Artifact to Github
        uses: actions/upload-artifact@v4
        with:
          name: unraid-wc-ui
          path: unraid-ui/dist-wc/

  build-web:
    name: Build Web App
    defaults:
      run:
        working-directory: web
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5
        with:
          ref: ${{ inputs.ref || github.ref }}

      - name: Create env file
        run: |
          touch .env
          echo VITE_ACCOUNT=${{ secrets.VITE_ACCOUNT }} >> .env
          echo VITE_CONNECT=${{ secrets.VITE_CONNECT }} >> .env
          echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
          echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Install Node
        uses: actions/setup-node@v5
        with:
          node-version-file: ".nvmrc"
          cache: 'pnpm'

      - name: PNPM Install
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile --filter @unraid/web --filter @unraid/ui

      - name: Build Unraid UI
        run: |
          cd ${{ github.workspace }}/unraid-ui
          pnpm run build

      - name: Lint files
        run: pnpm run lint

      - name: Type Check
        run: pnpm run type-check

      - name: Build
        run: pnpm run build

      - name: Upload build to Github artifacts
        uses: actions/upload-artifact@v4
        with:
          name: unraid-wc-rich
          path: web/dist
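The "Get Git Short Sha and API version" step encodes a small decision: an explicit override wins, an exact tag yields the bare package version, and anything else gets a `+<sha>` build-metadata suffix. A TypeScript sketch of the same rule (the function and its inputs are illustrative, not code from the repo):

```ts
// Sketch of the API_VERSION rule from the workflow step above.
// Fields mirror the shell variables; names here are illustrative.
function apiVersion(opts: {
  versionOverride?: string; // inputs.version_override
  packageVersion: string;   // jq -r '.version' package.json
  gitSha: string;           // git rev-parse --short HEAD
  isTagged: boolean;        // git describe --tags --exact-match succeeded
}): string {
  if (opts.versionOverride) return opts.versionOverride;
  // Exact tag: release build, use the package.json version as-is.
  if (opts.isTagged) return opts.packageVersion;
  // Otherwise: dev build, append the short SHA as build metadata.
  return `${opts.packageVersion}+${opts.gitSha}`;
}

// e.g. apiVersion({ packageVersion: '4.27.2', gitSha: 'f184711', isTagged: false })
//      => '4.27.2+f184711'
```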
28 .github/workflows/build-plugin.yml vendored
@@ -27,6 +27,15 @@ on:
        type: string
        required: true
        description: "Build number for the plugin builds"
      ref:
        type: string
        required: false
        description: "Git ref (commit SHA, branch, or tag) to checkout"
      TRIGGER_PRODUCTION_RELEASE:
        type: boolean
        required: false
        default: false
        description: "Whether to automatically trigger the release-production workflow (default: false)"
    secrets:
      CF_ACCESS_KEY_ID:
        required: true

@@ -49,6 +58,7 @@ jobs:
      - name: Checkout repo
        uses: actions/checkout@v5
        with:
          ref: ${{ inputs.ref }}
          fetch-depth: 0

      - uses: pnpm/action-setup@v4

@@ -68,7 +78,21 @@ jobs:
          GIT_SHA=$(git rev-parse --short HEAD)
          IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
          PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
          API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")

          # For release builds, trust the release tag version to avoid stale checkouts
          if [ "${{ inputs.RELEASE_CREATED }}" = "true" ] && [ -n "${{ inputs.RELEASE_TAG }}" ]; then
            TAG_VERSION="${{ inputs.RELEASE_TAG }}"
            TAG_VERSION="${TAG_VERSION#v}" # trim leading v if present

            if [ "$TAG_VERSION" != "$PACKAGE_LOCK_VERSION" ]; then
              echo "::warning::Release tag version ($TAG_VERSION) does not match package.json version ($PACKAGE_LOCK_VERSION). Using tag version for TXZ naming."
            fi

            API_VERSION="$TAG_VERSION"
          else
            API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
          fi

          echo "API_VERSION=${API_VERSION}" >> $GITHUB_OUTPUT

      - name: Install dependencies

@@ -136,7 +160,7 @@ jobs:
          done

      - name: Workflow Dispatch and wait
        if: inputs.RELEASE_CREATED == 'true'
        if: inputs.RELEASE_CREATED == 'true' && inputs.TRIGGER_PRODUCTION_RELEASE == true
        uses: the-actions-org/workflow-dispatch@v4.0.0
        with:
          workflow: release-production.yml
103 .github/workflows/claude-code-review.yml vendored
@@ -1,103 +0,0 @@
name: Claude Code Review

on:
  pull_request:
    types: [opened, synchronize]
    # Skip reviews for non-code changes
    paths-ignore:
      - "**/*.md"
      - "**/package-lock.json"
      - "**/pnpm-lock.yaml"
      - "**/.gitignore"
      - "**/LICENSE"
      - "**/*.config.js"
      - "**/*.config.ts"
      - "**/tsconfig.json"
      - "**/.github/workflows/*.yml"
      - "**/docs/**"

jobs:
  claude-review:
    # Skip review for bot PRs and WIP/skip-review PRs
    # Only run if changes are significant (>10 lines)
    if: |
      (github.event.pull_request.additions > 10 || github.event.pull_request.deletions > 10) &&
      !contains(github.event.pull_request.title, '[skip-review]') &&
      !contains(github.event.pull_request.title, '[WIP]') &&
      !endsWith(github.event.pull_request.user.login, '[bot]') &&
      github.event.pull_request.user.login != 'dependabot' &&
      github.event.pull_request.user.login != 'renovate'

    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: read
      issues: read
      id-token: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v5
        with:
          fetch-depth: 1

      - name: Run Claude Code Review
        id: claude-review
        uses: anthropics/claude-code-action@beta
        with:
          claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}

          # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4)
          # model: "claude-opus-4-20250514"

          # Direct prompt for automated review (no @claude mention needed)
          direct_prompt: |
            IMPORTANT: Review ONLY the DIFF/CHANGESET - the actual lines that were added or modified in this PR.
            DO NOT review the entire file context, only analyze the specific changes being made.

            Look for HIGH-PRIORITY issues in the CHANGED LINES ONLY:

            1. CRITICAL BUGS: Logic errors, null pointer issues, infinite loops, race conditions
            2. SECURITY: SQL injection, XSS, authentication bypass, exposed secrets, unsafe operations
            3. BREAKING CHANGES: API contract violations, removed exports, changed function signatures
            4. DATA LOSS RISKS: Destructive operations without safeguards, missing data validation

            DO NOT comment on:
            - Code that wasn't changed in this PR
            - Style, formatting, or documentation
            - Test coverage (unless tests are broken by the changes)
            - Minor optimizations or best practices
            - Existing code issues that weren't introduced by this PR

            If you find no critical issues in the DIFF, respond with: "✅ No critical issues found in changes"

            Keep response under 10 lines. Reference specific line numbers from the diff when reporting issues.

          # Optional: Use sticky comments to make Claude reuse the same comment on subsequent pushes to the same PR
          use_sticky_comment: true

          # Context-aware review based on PR characteristics
          # Uncomment to enable different review strategies based on context
          # direct_prompt: |
          #   ${{
          #     (github.event.pull_request.additions > 500) &&
          #     'Large PR detected. Focus only on architectural issues and breaking changes. Skip minor issues.' ||
          #     contains(github.event.pull_request.title, 'fix') &&
          #     'Bug fix PR: Verify the fix addresses the root cause and check for regression risks.' ||
          #     contains(github.event.pull_request.title, 'deps') &&
          #     'Dependency update: Check for breaking changes and security advisories only.' ||
          #     contains(github.event.pull_request.title, 'refactor') &&
          #     'Refactor PR: Verify no behavior changes and check for performance regressions.' ||
          #     contains(github.event.pull_request.title, 'feat') &&
          #     'New feature: Check for security issues, edge cases, and integration problems only.' ||
          #     'Standard review: Check for critical bugs, security issues, and breaking changes only.'
          #   }}

          # Optional: Add specific tools for running tests or linting
          # allowed_tools: "Bash(npm run test),Bash(npm run lint),Bash(npm run typecheck)"

          # Optional: Skip review for certain conditions
          # if: |
          #   !contains(github.event.pull_request.title, '[skip-review]') &&
          #   !contains(github.event.pull_request.title, '[WIP]')
210 .github/workflows/generate-release-notes.yml vendored Normal file
@@ -0,0 +1,210 @@
name: Generate Release Notes

on:
  workflow_call:
    inputs:
      version:
        description: 'Version number (e.g., 4.25.3)'
        required: true
        type: string
      target_commitish:
        description: 'Commit SHA or branch (leave empty for current HEAD)'
        required: false
        type: string
      release_notes:
        description: 'Custom release notes (leave empty to auto-generate)'
        required: false
        type: string
    outputs:
      release_notes:
        description: 'Generated or provided release notes'
        value: ${{ jobs.generate.outputs.release_notes }}
    secrets:
      UNRAID_BOT_GITHUB_ADMIN_TOKEN:
        required: true

jobs:
  generate:
    name: Generate Release Notes
    runs-on: ubuntu-latest
    outputs:
      release_notes: ${{ steps.generate_notes.outputs.release_notes }}
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5
        with:
          ref: ${{ inputs.target_commitish || github.ref }}
          fetch-depth: 0
          token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Generate Release Notes
        id: generate_notes
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          TAG_NAME="v${{ inputs.version }}"
          VERSION="${{ inputs.version }}"

          if [ -n "${{ inputs.release_notes }}" ]; then
            NOTES="${{ inputs.release_notes }}"
          else
            CHANGELOG_PATH="api/CHANGELOG.md"

            if [ -f "$CHANGELOG_PATH" ]; then
              echo "Extracting release notes from CHANGELOG.md for version ${VERSION}"

              NOTES=$(awk -v ver="$VERSION" '
                BEGIN {
                  found=0; capture=0; output="";
                  gsub(/\./, "\\.", ver);
                }
                /^## \[/ {
                  if (capture) exit;
                  if ($0 ~ "\\[" ver "\\]") {
                    found=1;
                    capture=1;
                  }
                }
                capture {
                  if (output != "") output = output "\n";
                  output = output $0;
                }
                END {
                  if (found) print output;
                  else exit 1;
                }
              ' "$CHANGELOG_PATH") || EXTRACTION_STATUS=$?

              if [ ${EXTRACTION_STATUS:-0} -eq 0 ] && [ -n "$NOTES" ]; then
                echo "✓ Found release notes in CHANGELOG.md"
              else
                echo "⚠ Version ${VERSION} not found in CHANGELOG.md, generating with conventional-changelog"

                PREV_TAG=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")
                CHANGELOG_GENERATED=false

                if [ -n "$PREV_TAG" ]; then
                  echo "Generating changelog from ${PREV_TAG}..HEAD using conventional-changelog"

                  npm install -g conventional-changelog-cli

                  TEMP_NOTES=$(mktemp)
                  conventional-changelog -p conventionalcommits \
                    --release-count 1 \
                    --output-unreleased \
                    > "$TEMP_NOTES" 2>/dev/null || true

                  if [ -s "$TEMP_NOTES" ]; then
                    NOTES=$(cat "$TEMP_NOTES")

                    if [ -n "$NOTES" ]; then
                      echo "✓ Generated changelog with conventional-changelog"
                      CHANGELOG_GENERATED=true

                      TEMP_CHANGELOG=$(mktemp)
                      {
                        if [ -f "$CHANGELOG_PATH" ]; then
                          head -n 1 "$CHANGELOG_PATH"
                          echo ""
                          echo "$NOTES"
                          echo ""
                          tail -n +2 "$CHANGELOG_PATH"
                        else
                          echo "# Changelog"
                          echo ""
                          echo "$NOTES"
                        fi
                      } > "$TEMP_CHANGELOG"

                      mv "$TEMP_CHANGELOG" "$CHANGELOG_PATH"
                      echo "✓ Updated CHANGELOG.md with generated notes"
                    else
                      echo "⚠ conventional-changelog produced empty output, using GitHub auto-generation"
                      NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
                        -f tag_name="${TAG_NAME}" \
                        -f target_commitish="${{ inputs.target_commitish || github.sha }}" \
                        -f previous_tag_name="${PREV_TAG}" \
                        --jq '.body')
                    fi
                  else
                    echo "⚠ conventional-changelog failed, using GitHub auto-generation"
                    NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
                      -f tag_name="${TAG_NAME}" \
                      -f target_commitish="${{ inputs.target_commitish || github.sha }}" \
                      -f previous_tag_name="${PREV_TAG}" \
                      --jq '.body')
                  fi

                  rm -f "$TEMP_NOTES"
                else
                  echo "⚠ No previous tag found, using GitHub auto-generation"
                  NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
                    -f tag_name="${TAG_NAME}" \
                    -f target_commitish="${{ inputs.target_commitish || github.sha }}" \
                    --jq '.body' || echo "Release ${VERSION}")
                fi

                if [ "$CHANGELOG_GENERATED" = true ]; then
                  BRANCH_OR_SHA="${{ inputs.target_commitish || github.ref }}"

                  if git show-ref --verify --quiet "refs/heads/${BRANCH_OR_SHA}"; then
                    echo ""
                    echo "=========================================="
                    echo "CHANGELOG GENERATED AND COMMITTED"
                    echo "=========================================="
                    echo ""

                    git config user.name "github-actions[bot]"
                    git config user.email "github-actions[bot]@users.noreply.github.com"

                    BEFORE_SHA=$(git rev-parse HEAD)

                    git add "$CHANGELOG_PATH"
                    git commit -m "chore: add changelog for version ${VERSION}"
                    git push origin "HEAD:${BRANCH_OR_SHA}"

                    AFTER_SHA=$(git rev-parse HEAD)

                    echo "✓ Changelog committed and pushed successfully"
                    echo ""
                    echo "Previous SHA: ${BEFORE_SHA}"
                    echo "New SHA: ${AFTER_SHA}"
                    echo ""
                    echo "⚠️ CRITICAL: A new commit was created, but github.sha is immutable."
                    echo "⚠️ github.sha = ${BEFORE_SHA} (original workflow trigger)"
                    echo "⚠️ The release tag must point to ${AFTER_SHA} (with changelog)"
                    echo ""
                    echo "Re-run this workflow to create the release with the correct commit."
                    echo ""
                    exit 1
                  else
                    echo "⚠ Target is a commit SHA, not a branch. Cannot push changelog updates."
                    echo "Changelog was generated but not committed."
                  fi
                fi
              fi
            else
              echo "⚠ CHANGELOG.md not found, using GitHub auto-generation"
              PREV_TAG=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")

              if [ -n "$PREV_TAG" ]; then
                NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
                  -f tag_name="${TAG_NAME}" \
                  -f target_commitish="${{ inputs.target_commitish || github.sha }}" \
                  -f previous_tag_name="${PREV_TAG}" \
                  --jq '.body')
              else
                NOTES="Release ${VERSION}"
              fi
            fi
          fi

          echo "release_notes<<EOF" >> $GITHUB_OUTPUT
          echo "$NOTES" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT
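The awk program in this workflow pulls a single `## [x.y.z]` section out of CHANGELOG.md. The same extraction in TypeScript, for readers who find awk opaque (a sketch, not code from the repo):

```ts
import { readFileSync } from 'node:fs';

// Extract the "## [version]" section from a keep-a-changelog style file.
// Mirrors the awk program above: capture from the matching heading until
// the next "## [" heading, and fail if the version is absent.
function extractReleaseNotes(changelogPath: string, version: string): string {
  const lines = readFileSync(changelogPath, 'utf8').split('\n');
  const out: string[] = [];
  let capturing = false;
  for (const line of lines) {
    if (line.startsWith('## [')) {
      if (capturing) break;                       // next release section: stop
      capturing = line.includes(`[${version}]`);  // start at the wanted one
    }
    if (capturing) out.push(line);
  }
  if (out.length === 0) throw new Error(`Version ${version} not found`);
  return out.join('\n');
}

// e.g. extractReleaseNotes('api/CHANGELOG.md', '4.27.2')
```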
194 .github/workflows/main.yml vendored
@@ -154,170 +154,15 @@ jobs:
          files: ./coverage/coverage-final.json,../web/coverage/coverage-final.json,../unraid-ui/coverage/coverage-final.json,../packages/unraid-api-plugin-connect/coverage/coverage-final.json,../packages/unraid-shared/coverage/coverage-final.json
          fail_ci_if_error: false

  build-api:
    name: Build API
    runs-on: ubuntu-latest
    outputs:
      build_number: ${{ steps.buildnumber.outputs.build_number }}
    defaults:
      run:
        working-directory: api
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Install Node
        uses: actions/setup-node@v5
        with:
          node-version-file: ".nvmrc"
          cache: 'pnpm'

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0

      - name: PNPM Install
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile

      - name: Build
        run: pnpm run build

      - name: Get Git Short Sha and API version
        id: vars
        run: |
          GIT_SHA=$(git rev-parse --short HEAD)
          IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
          PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
          API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
          export API_VERSION
          echo "API_VERSION=${API_VERSION}" >> $GITHUB_ENV
          echo "PACKAGE_LOCK_VERSION=${PACKAGE_LOCK_VERSION}" >> $GITHUB_OUTPUT

      - name: Generate build number
        id: buildnumber
        uses: onyxmueller/build-tag-number@v1
        with:
          token: ${{secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN}}
          prefix: ${{steps.vars.outputs.PACKAGE_LOCK_VERSION}}

      - name: Build
        run: |
          pnpm run build:release
          tar -czf deploy/unraid-api.tgz -C deploy/pack/ .

      - name: Upload tgz to Github artifacts
        uses: actions/upload-artifact@v4
        with:
          name: unraid-api
          path: ${{ github.workspace }}/api/deploy/unraid-api.tgz

  build-unraid-ui-webcomponents:
    name: Build Unraid UI Library (Webcomponent Version)
    defaults:
      run:
        working-directory: unraid-ui
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Install Node
        uses: actions/setup-node@v5
        with:
          node-version-file: ".nvmrc"
          cache: 'pnpm'

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0

      - name: Install dependencies
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile --filter @unraid/ui

      - name: Lint
        run: pnpm run lint

      - name: Build
        run: pnpm run build:wc

      - name: Upload Artifact to Github
        uses: actions/upload-artifact@v4
        with:
          name: unraid-wc-ui
          path: unraid-ui/dist-wc/

  build-web:
    # needs: [build-unraid-ui]
    name: Build Web App
    defaults:
      run:
        working-directory: web
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5

      - name: Create env file
        run: |
          touch .env
          echo VITE_ACCOUNT=${{ secrets.VITE_ACCOUNT }} >> .env
          echo VITE_CONNECT=${{ secrets.VITE_CONNECT }} >> .env
          echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
          echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Install Node
        uses: actions/setup-node@v5
        with:
          node-version-file: ".nvmrc"
          cache: 'pnpm'

      - name: PNPM Install
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile --filter @unraid/web --filter @unraid/ui

      - name: Build Unraid UI
        run: |
          cd ${{ github.workspace }}/unraid-ui
          pnpm run build

      - name: Lint files
        run: pnpm run lint

      - name: Type Check
        run: pnpm run type-check

      - name: Build
        run: pnpm run build

      - name: Upload build to Github artifacts
        uses: actions/upload-artifact@v4
        with:
          name: unraid-wc-rich
          path: web/dist
  build-artifacts:
    name: Build All Artifacts
    uses: ./.github/workflows/build-artifacts.yml
    secrets:
      VITE_ACCOUNT: ${{ secrets.VITE_ACCOUNT }}
      VITE_CONNECT: ${{ secrets.VITE_CONNECT }}
      VITE_UNRAID_NET: ${{ secrets.VITE_UNRAID_NET }}
      VITE_CALLBACK_KEY: ${{ secrets.VITE_CALLBACK_KEY }}
      UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

  release-please:
    name: Release Please

@@ -326,15 +171,15 @@ jobs:
    if: github.event_name == 'push' && github.ref == 'refs/heads/main'
    needs:
      - test-api
      - build-api
      - build-web
      - build-unraid-ui-webcomponents
      - build-artifacts
    permissions:
      contents: write
      pull-requests: write
    steps:
      - name: Checkout
        uses: actions/checkout@v5
        with:
          fetch-depth: 0

      - id: release
        uses: googleapis/release-please-action@v4

@@ -345,17 +190,15 @@ jobs:
  build-plugin-staging-pr:
    name: Build and Deploy Plugin
    needs:
      - build-api
      - build-web
      - build-unraid-ui-webcomponents
      - build-artifacts
      - test-api
    uses: ./.github/workflows/build-plugin.yml
    with:
      RELEASE_CREATED: false
      RELEASE_CREATED: 'false'
      TAG: ${{ github.event.pull_request.number && format('PR{0}', github.event.pull_request.number) || '' }}
      BUCKET_PATH: ${{ github.event.pull_request.number && format('unraid-api/tag/PR{0}', github.event.pull_request.number) || 'unraid-api' }}
      BASE_URL: "https://preview.dl.unraid.net/unraid-api"
      BUILD_NUMBER: ${{ needs.build-api.outputs.build_number }}
      BUILD_NUMBER: ${{ needs.build-artifacts.outputs.build_number }}
    secrets:
      CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
      CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}

@@ -367,15 +210,16 @@ jobs:
    name: Build and Deploy Production Plugin
    needs:
      - release-please
      - build-api
      - build-artifacts
    uses: ./.github/workflows/build-plugin.yml
    with:
      RELEASE_CREATED: true
      RELEASE_CREATED: 'true'
      RELEASE_TAG: ${{ needs.release-please.outputs.tag_name }}
      TAG: ""
      BUCKET_PATH: unraid-api
      BASE_URL: "https://stable.dl.unraid.net/unraid-api"
      BUILD_NUMBER: ${{ needs.build-api.outputs.build_number }}
      BUILD_NUMBER: ${{ needs.build-artifacts.outputs.build_number }}
      TRIGGER_PRODUCTION_RELEASE: true
    secrets:
      CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
      CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
239 .github/workflows/manual-release.yml vendored Normal file
@@ -0,0 +1,239 @@
name: Manual Release

on:
  workflow_dispatch:
    inputs:
      version:
        description: 'Version to release (e.g., 4.25.3)'
        required: true
        type: string
      target_commitish:
        description: 'Commit SHA or branch (leave empty for current HEAD)'
        required: false
        type: string
      release_notes:
        description: 'Release notes/changelog (leave empty to auto-generate from commits)'
        required: false
        type: string
      prerelease:
        description: 'Mark as prerelease'
        required: false
        type: boolean
        default: false

permissions:
  contents: write
  pull-requests: write

jobs:
  validate-version:
    name: Validate and Update Package Versions
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5
        with:
          ref: ${{ inputs.target_commitish || github.ref }}
          fetch-depth: 0
          token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Check and Update Package Versions
        run: |
          EXPECTED_VERSION="${{ inputs.version }}"
          MISMATCHES_FOUND=false

          PACKAGE_JSONS=(
            "package.json"
            "api/package.json"
            "web/package.json"
            "unraid-ui/package.json"
            "plugin/package.json"
            "packages/unraid-shared/package.json"
            "packages/unraid-api-plugin-health/package.json"
            "packages/unraid-api-plugin-generator/package.json"
            "packages/unraid-api-plugin-connect/package.json"
          )

          echo "Checking package.json versions against expected version: ${EXPECTED_VERSION}"

          for pkg in "${PACKAGE_JSONS[@]}"; do
            if [ -f "$pkg" ]; then
              CURRENT_VERSION=$(node -p "require('./$pkg').version")
              if [ "$CURRENT_VERSION" != "$EXPECTED_VERSION" ]; then
                echo "❌ Version mismatch in $pkg: $CURRENT_VERSION != $EXPECTED_VERSION"
                MISMATCHES_FOUND=true

                # Detect indentation by checking the first property line
                INDENT_SPACES=$(head -10 "$pkg" | grep '^ *"' | head -1 | sed 's/".*//g' | wc -c)
                INDENT_SPACES=$((INDENT_SPACES - 1))

                jq --indent "$INDENT_SPACES" --arg version "$EXPECTED_VERSION" '.version = $version' "$pkg" > "$pkg.tmp" && mv "$pkg.tmp" "$pkg"
                echo "✓ Updated $pkg to version $EXPECTED_VERSION"
              else
                echo "✓ $pkg version matches: $CURRENT_VERSION"
              fi
            fi
          done

          if [ "$MISMATCHES_FOUND" = true ]; then
            echo ""
            echo "=========================================="
            echo "Version mismatches found!"
            echo "=========================================="
            echo ""

            BRANCH_OR_SHA="${{ inputs.target_commitish || github.ref }}"

            if git show-ref --verify --quiet "refs/heads/${BRANCH_OR_SHA}"; then
              echo "Creating commit with version updates and pushing to branch: ${BRANCH_OR_SHA}"

              git config user.name "github-actions[bot]"
              git config user.email "github-actions[bot]@users.noreply.github.com"

              BEFORE_SHA=$(git rev-parse HEAD)

              git add ${PACKAGE_JSONS[@]}
              git commit -m "chore: update package versions to ${{ inputs.version }}"
              git push origin "HEAD:${BRANCH_OR_SHA}"

              AFTER_SHA=$(git rev-parse HEAD)

              echo ""
              echo "=========================================="
              echo "WORKFLOW MUST BE RE-RUN"
              echo "=========================================="
              echo ""
              echo "✓ Version updates committed and pushed successfully"
              echo ""
              echo "Previous SHA: ${BEFORE_SHA}"
              echo "New SHA: ${AFTER_SHA}"
              echo ""
              echo "⚠️ CRITICAL: A new commit was created, but github.sha is immutable."
              echo "⚠️ github.sha = ${BEFORE_SHA} (original workflow trigger)"
              echo "⚠️ The release tag must point to ${AFTER_SHA} (with version updates)"
              echo ""
              echo "Re-run this workflow to create the release with the correct commit."
              echo ""
              exit 1
            else
              echo "Target is a commit SHA, not a branch. Cannot push version updates."
              echo "Please update the package.json versions manually and re-run the workflow."
              exit 1
            fi
          fi

          echo ""
          echo "✓ All package.json versions match the expected version: ${EXPECTED_VERSION}"

  build-artifacts:
    name: Build All Artifacts
    needs:
      - validate-version
    uses: ./.github/workflows/build-artifacts.yml
    with:
      ref: ${{ inputs.target_commitish || github.ref }}
      version_override: ${{ inputs.version }}
    secrets:
      VITE_ACCOUNT: ${{ secrets.VITE_ACCOUNT }}
      VITE_CONNECT: ${{ secrets.VITE_CONNECT }}
      VITE_UNRAID_NET: ${{ secrets.VITE_UNRAID_NET }}
      VITE_CALLBACK_KEY: ${{ secrets.VITE_CALLBACK_KEY }}
      UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

  generate-release-notes:
    name: Generate Release Notes
    needs:
      - build-artifacts
    uses: ./.github/workflows/generate-release-notes.yml
    with:
      version: ${{ inputs.version }}
      target_commitish: ${{ inputs.target_commitish || github.ref }}
      release_notes: ${{ inputs.release_notes }}
    secrets:
      UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

  create-release:
    name: Create GitHub Release (Draft)
    runs-on: ubuntu-latest
    needs:
      - generate-release-notes
    outputs:
      tag_name: ${{ steps.create_release.outputs.tag_name }}
      release_notes: ${{ needs.generate-release-notes.outputs.release_notes }}
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5
        with:
          ref: ${{ inputs.target_commitish || github.ref }}
          fetch-depth: 0

      - name: Create or Update Release as Draft
        id: create_release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          TAG_NAME="v${{ inputs.version }}"
          TARGET="${{ inputs.target_commitish || github.sha }}"

          echo "tag_name=${TAG_NAME}" >> $GITHUB_OUTPUT

          if gh release view "${TAG_NAME}" > /dev/null 2>&1; then
            echo "Release ${TAG_NAME} already exists, updating as draft..."
            gh release edit "${TAG_NAME}" \
              --draft \
              --notes "${{ needs.generate-release-notes.outputs.release_notes }}" \
              ${{ inputs.prerelease && '--prerelease' || '' }}
          else
            echo "Creating new draft release ${TAG_NAME}..."
            git tag "${TAG_NAME}" "${TARGET}" || true
            git push origin "${TAG_NAME}" || true

            gh release create "${TAG_NAME}" \
              --draft \
              --title "${{ inputs.version }}" \
              --notes "${{ needs.generate-release-notes.outputs.release_notes }}" \
              --target "${TARGET}" \
              ${{ inputs.prerelease && '--prerelease' || '' }}
          fi

  build-plugin-production:
    name: Build and Deploy Production Plugin
    needs:
      - create-release
      - build-artifacts
    uses: ./.github/workflows/build-plugin.yml
    with:
      RELEASE_CREATED: 'true'
      RELEASE_TAG: ${{ needs.create-release.outputs.tag_name }}
      TAG: ""
      BUCKET_PATH: unraid-api
      BASE_URL: "https://stable.dl.unraid.net/unraid-api"
      BUILD_NUMBER: ${{ needs.build-artifacts.outputs.build_number }}
      ref: ${{ inputs.target_commitish || github.ref }}
    secrets:
      CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
      CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
      CF_BUCKET_PREVIEW: ${{ secrets.CF_BUCKET_PREVIEW }}
      CF_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
      UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

  publish-release:
    name: Publish Release
    runs-on: ubuntu-latest
    needs:
      - create-release
      - build-plugin-production
    steps:
      - name: Publish Release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          TAG_NAME="${{ needs.create-release.outputs.tag_name }}"
          echo "Publishing release ${TAG_NAME}..."
          gh release edit "${TAG_NAME}" --draft=false --repo ${{ github.repository }}
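The "Check and Update Package Versions" step compares every package.json to the requested release version and rewrites mismatches while preserving each file's indentation. A TypeScript sketch of the same check (the helper is illustrative, not code from the repo):

```ts
import { readFileSync, writeFileSync } from 'node:fs';

// Sketch of the version-sync logic above: compare a package.json to the
// requested release version and rewrite it, keeping the file's indentation.
function syncVersion(pkgPath: string, expected: string): boolean {
  const raw = readFileSync(pkgPath, 'utf8');
  const pkg = JSON.parse(raw);
  if (pkg.version === expected) return false; // already in sync

  // Detect indentation from the first indented property line, as the
  // workflow does with head/grep/sed/wc.
  const indent = raw.match(/\n(\s+)"/)?.[1] ?? '  ';
  pkg.version = expected;
  writeFileSync(pkgPath, JSON.stringify(pkg, null, indent) + '\n');
  return true; // caller should commit, push, and re-run the workflow
}
```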
30 .github/workflows/publish-schema.yml vendored Normal file
@@ -0,0 +1,30 @@
name: Publish GraphQL Schema

on:
  push:
    branches:
      - main
    paths:
      - 'api/generated-schema.graphql'
  workflow_dispatch:

jobs:
  publish-schema:
    name: Publish Schema to Apollo Studio
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5

      - name: Install Apollo Rover CLI
        run: |
          curl -sSL https://rover.apollo.dev/nix/latest | sh
          echo "$HOME/.rover/bin" >> $GITHUB_PATH

      - name: Publish schema to Apollo Studio
        env:
          APOLLO_KEY: ${{ secrets.APOLLO_KEY }}
        run: |
          rover graph publish Unraid-API@current \
            --schema api/generated-schema.graphql
3 .gitignore vendored
@@ -123,3 +123,6 @@ api/dev/Unraid.net/myservers.cfg
# local Mise settings
.mise.toml

# Compiled test pages (generated from Nunjucks templates)
web/public/test-pages/*.html
@@ -1 +1 @@
{".":"4.25.1"}
{".":"4.27.2"}
@@ -5,13 +5,7 @@
 */

/* Default/White Theme */
:root,
.theme-white {
  --header-text-primary: #ffffff;
  --header-text-secondary: #999999;
  --header-background-color: #1c1b1b;
  --header-gradient-start: rgba(28, 27, 27, 0);
  --header-gradient-end: rgba(28, 27, 27, 0.7);
.Theme--white {
  --color-border: #383735;
  --color-alpha: #ff8c2f;
  --color-beta: #1c1b1b;

@@ -20,13 +14,8 @@
}

/* Black Theme */
.theme-black,
.theme-black.dark {
  --header-text-primary: #1c1b1b;
  --header-text-secondary: #999999;
  --header-background-color: #f2f2f2;
  --header-gradient-start: rgba(242, 242, 242, 0);
  --header-gradient-end: rgba(242, 242, 242, 0.7);
.Theme--black,
.Theme--black.dark {
  --color-border: #e0e0e0;
  --color-alpha: #ff8c2f;
  --color-beta: #f2f2f2;

@@ -35,12 +24,7 @@
}

/* Gray Theme */
.theme-gray {
  --header-text-primary: #ffffff;
  --header-text-secondary: #999999;
  --header-background-color: #1c1b1b;
  --header-gradient-start: rgba(28, 27, 27, 0);
  --header-gradient-end: rgba(28, 27, 27, 0.7);
.Theme--gray {
  --color-border: #383735;
  --color-alpha: #ff8c2f;
  --color-beta: #383735;

@@ -49,12 +33,7 @@
}

/* Azure Theme */
.theme-azure {
  --header-text-primary: #1c1b1b;
  --header-text-secondary: #999999;
  --header-background-color: #f2f2f2;
  --header-gradient-start: rgba(242, 242, 242, 0);
  --header-gradient-end: rgba(242, 242, 242, 0.7);
.Theme--azure {
  --color-border: #5a8bb8;
  --color-alpha: #ff8c2f;
  --color-beta: #e7f2f8;

@@ -66,27 +45,3 @@
.dark {
  --color-border: #383735;
}

/*
 * Dynamic color variables for user overrides from GraphQL
 * These are set via JavaScript and override the theme defaults
 * Using :root with class for higher specificity to override theme classes
 */
:root.has-custom-header-text {
  --header-text-primary: var(--custom-header-text-primary);
  --color-header-text-primary: var(--custom-header-text-primary);
}

:root.has-custom-header-meta {
  --header-text-secondary: var(--custom-header-text-secondary);
  --color-header-text-secondary: var(--custom-header-text-secondary);
}

:root.has-custom-header-bg {
  --header-background-color: var(--custom-header-background-color);
  --color-header-background: var(--custom-header-background-color);
  --header-gradient-start: var(--custom-header-gradient-start);
  --header-gradient-end: var(--custom-header-gradient-end);
  --color-header-gradient-start: var(--custom-header-gradient-start);
  --color-header-gradient-end: var(--custom-header-gradient-end);
}
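The `:root.has-custom-*` hooks removed in the last hunk were driven from JavaScript, per the comment in the removed block: code adds the marker class and sets the `--custom-*` variables it reads. A sketch of that wiring (the helper itself is illustrative, not repo code; variable names come from the CSS above):

```ts
// Illustrative wiring for the custom-header override pattern in the CSS
// above: add the marker class, then set the --custom-* variables it reads.
function applyCustomHeaderBg(color: string, gradientStart: string, gradientEnd: string): void {
  const root = document.documentElement;
  root.classList.add('has-custom-header-bg');
  root.style.setProperty('--custom-header-background-color', color);
  root.style.setProperty('--custom-header-gradient-start', gradientStart);
  root.style.setProperty('--custom-header-gradient-end', gradientEnd);
}

// e.g. applyCustomHeaderBg('#1c1b1b', 'rgba(28, 27, 27, 0)', 'rgba(28, 27, 27, 0.7)');
```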
@@ -32,3 +32,4 @@ CHOKIDAR_USEPOLLING=true
LOG_TRANSPORT=console
LOG_LEVEL=trace
ENABLE_NEXT_DOCKER_RELEASE=true
SKIP_CONNECT_PLUGIN_CHECK=true
@@ -42,7 +42,10 @@ export default tseslint.config(
        'ignorePackages',
        {
          js: 'always',
          ts: 'always',
          mjs: 'always',
          cjs: 'always',
          ts: 'never',
          tsx: 'never',
        },
      ],
      'no-restricted-globals': [
@@ -1,5 +1,81 @@
# Changelog

## [4.27.2](https://github.com/unraid/api/compare/v4.27.1...v4.27.2) (2025-11-21)

### Bug Fixes

* issue with header flashing + issue with trial date ([64875ed](https://github.com/unraid/api/commit/64875edbba786a0d1ba0113c9e9a3d38594eafcc))

## [4.27.1](https://github.com/unraid/api/compare/v4.27.0...v4.27.1) (2025-11-21)

### Bug Fixes

* missing translations for expiring trials ([#1800](https://github.com/unraid/api/issues/1800)) ([36c1049](https://github.com/unraid/api/commit/36c104915ece203a3cac9e1a13e0c325e536a839))
* resolve header flash when background color is set ([#1796](https://github.com/unraid/api/issues/1796)) ([dc9a036](https://github.com/unraid/api/commit/dc9a036c73d8ba110029364e0d044dc24c7d0dfa))

## [4.27.0](https://github.com/unraid/api/compare/v4.26.2...v4.27.0) (2025-11-19)

### Features

* remove Unraid API log download functionality ([#1793](https://github.com/unraid/api/issues/1793)) ([e4a9b82](https://github.com/unraid/api/commit/e4a9b8291b049752a9ff59b17ff50cf464fe0535))

### Bug Fixes

* auto-uninstallation of connect api plugin ([#1791](https://github.com/unraid/api/issues/1791)) ([e734043](https://github.com/unraid/api/commit/e7340431a58821ec1b4f5d1b452fba6613b01fa5))

## [4.26.2](https://github.com/unraid/api/compare/v4.26.1...v4.26.2) (2025-11-19)

### Bug Fixes

* **theme:** Missing header background color ([e2fdf6c](https://github.com/unraid/api/commit/e2fdf6cadbd816559b8c82546c2bc771a81ffa9e))

## [4.26.1](https://github.com/unraid/api/compare/v4.26.0...v4.26.1) (2025-11-18)

### Bug Fixes

* **theme:** update theme class naming and scoping logic ([b28ef1e](https://github.com/unraid/api/commit/b28ef1ea334cb4842f01fa992effa7024185c6c9))

## [4.26.0](https://github.com/unraid/api/compare/v4.25.3...v4.26.0) (2025-11-17)

### Features

* add cpu power query & subscription ([#1745](https://github.com/unraid/api/issues/1745)) ([d7aca81](https://github.com/unraid/api/commit/d7aca81c60281bfa47fb9113929c1ead6ed3361b))
* add schema publishing to apollo studio ([#1772](https://github.com/unraid/api/issues/1772)) ([7e13202](https://github.com/unraid/api/commit/7e13202aa1c02803095bb72bb1bcb2472716f53a))
* add workflow_dispatch trigger to schema publishing workflow ([818e7ce](https://github.com/unraid/api/commit/818e7ce997059663e07efcf1dab706bf0d7fc9da))
* apollo studio readme link ([c4cd0c6](https://github.com/unraid/api/commit/c4cd0c63520deec15d735255f38811f0360fe3a1))
* **cli:** make `unraid-api plugins remove` scriptable ([#1774](https://github.com/unraid/api/issues/1774)) ([64eb9ce](https://github.com/unraid/api/commit/64eb9ce9b5d1ff4fb1f08d9963522c5d32221ba7))
* use persisted theme css to fix flashes on header ([#1784](https://github.com/unraid/api/issues/1784)) ([854b403](https://github.com/unraid/api/commit/854b403fbd85220a3012af58ce033cf0b8418516))

### Bug Fixes

* **api:** decode html entities before parsing notifications ([#1768](https://github.com/unraid/api/issues/1768)) ([42406e7](https://github.com/unraid/api/commit/42406e795da1e5b95622951a467722dde72d51a8))
* **connect:** disable api plugin if unraid plugin is absent ([#1773](https://github.com/unraid/api/issues/1773)) ([c264a18](https://github.com/unraid/api/commit/c264a1843cf115e8cc1add1ab4f12fdcc932405a))
* detection of flash backup activation state ([#1769](https://github.com/unraid/api/issues/1769)) ([d18eaf2](https://github.com/unraid/api/commit/d18eaf2364e0c04992c52af38679ff0a0c570440))
* re-add missing header gradient styles ([#1787](https://github.com/unraid/api/issues/1787)) ([f8a6785](https://github.com/unraid/api/commit/f8a6785e9c92f81acaef76ac5eb78a4a769e69da))
* respect OS safe mode in plugin loader ([#1775](https://github.com/unraid/api/issues/1775)) ([92af3b6](https://github.com/unraid/api/commit/92af3b61156cabae70368cf5222a2f7ac5b4d083))

## [4.25.3](https://github.com/unraid/unraid-api/compare/v4.25.2...v4.25.3) (2025-10-22)

### Bug Fixes

* flaky watch on boot drive's dynamix config ([ec7aa06](https://github.com/unraid/unraid-api/commit/ec7aa06d4a5fb1f0e84420266b0b0d7ee09a3663))

## [4.25.2](https://github.com/unraid/api/compare/v4.25.1...v4.25.2) (2025-09-30)

### Bug Fixes

* enhance activation code modal visibility logic ([#1733](https://github.com/unraid/api/issues/1733)) ([e57ec00](https://github.com/unraid/api/commit/e57ec00627e54ce76d903fd0fa8686ad02b393f3))

## [4.25.1](https://github.com/unraid/api/compare/v4.25.0...v4.25.1) (2025-09-30)
@@ -71,6 +71,10 @@ unraid-api report -vv

If you found this file you're likely a developer. If you'd like to know more about the API and when it's available please join [our discord](https://discord.unraid.net/).

## Internationalization

- Run `pnpm --filter @unraid/api i18n:extract` to scan the Nest.js source for translation helper usages and update `src/i18n/en.json` with any new keys. The extractor keeps existing translations intact and appends new keys with their English source text.

## License

Copyright Lime Technology Inc. All rights reserved.
@@ -1,5 +1,5 @@
{
  "version": "4.22.2",
  "version": "4.27.2",
  "extraOrigins": [],
  "sandbox": true,
  "ssoSubIds": [],
@@ -0,0 +1,6 @@
timestamp=1730937600
event=Hashtag Test
subject=Warning [UNRAID] - #1 OS is cooking
description=Disk 1 temperature has reached #epic # levels of proportion
importance=warning

@@ -0,0 +1,6 @@
timestamp=1730937600
event=Temperature Test
subject=Warning [UNRAID] - High disk temperature detected: 45 °C
description=Disk 1 temperature has reached 45 °C (threshold: 40 °C)<br><br>Current temperatures:<br>Parity - 32 °C [OK]<br>Disk 1 - 45 °C [WARNING]<br>Disk 2 - 38 °C [OK]<br>Cache - 28 °C [OK]<br><br>Please check cooling system.
importance=warning
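These fixtures exercise notification parsing, and the changelog records a fix to "decode html entities before parsing notifications" (with `html-entities` added to `api/package.json` later in this diff). A sketch of that idea; the parser shown is illustrative, not the repo's actual implementation:

```ts
import { decode } from 'html-entities';

// Sketch: notification fields like the subject/description above may arrive
// entity-encoded (e.g. "45 &deg;C"), so decode each value after splitting
// the key=value lines. Parsing details here are illustrative.
function parseNotification(raw: string): Record<string, string> {
  const fields: Record<string, string> = {};
  for (const line of raw.split('\n')) {
    const eq = line.indexOf('=');
    if (eq === -1) continue; // skip non key=value lines
    const key = line.slice(0, eq).trim();
    fields[key] = decode(line.slice(eq + 1)); // "&deg;" -> "°", "&amp;" -> "&"
  }
  return fields;
}
```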
@@ -1391,6 +1391,19 @@ type CpuLoad {
  percentSteal: Float!
}

type CpuPackages implements Node {
  id: PrefixedID!

  """Total CPU package power draw (W)"""
  totalPower: Float!

  """Power draw per package (W)"""
  power: [Float!]!

  """Temperature per package (°C)"""
  temp: [Float!]!
}

type CpuUtilization implements Node {
  id: PrefixedID!

@@ -1454,6 +1467,12 @@ type InfoCpu implements Node {

  """CPU feature flags"""
  flags: [String!]

  """
  Per-package array of core/thread pairs, e.g. [[[0,1],[2,3]], [[4,5],[6,7]]]
  """
  topology: [[[Int!]!]!]!
  packages: CpuPackages!
}

type MemoryLayout implements Node {

@@ -2642,6 +2661,7 @@ type Subscription {
  arraySubscription: UnraidArray!
  logFile(path: String!): LogFileContent!
  systemMetricsCpu: CpuUtilization!
  systemMetricsCpuTelemetry: CpuPackages!
  systemMetricsMemory: MemoryUtilization!
  upsUpdates: UPSDevice!
}
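The new `systemMetricsCpuTelemetry` subscription exposes the `CpuPackages` fields above. A minimal client sketch with `graphql-ws`, which the API already depends on; the endpoint URL is an assumption:

```ts
import { createClient } from 'graphql-ws';

// Endpoint is illustrative; use your server's GraphQL websocket URL.
const client = createClient({ url: 'ws://tower.local/graphql' });

// Subscribe to the CpuPackages telemetry added in this diff.
client.subscribe(
  {
    query: `subscription {
      systemMetricsCpuTelemetry {
        totalPower  # total package power draw (W)
        power       # per-package power draw (W)
        temp        # per-package temperature (°C)
      }
    }`,
  },
  {
    next: ({ data }) => console.log(data),
    error: (err) => console.error(err),
    complete: () => console.log('done'),
  }
);
```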
@@ -1,6 +1,6 @@
{
  "name": "@unraid/api",
  "version": "4.25.1",
  "version": "4.27.2",
  "main": "src/cli/index.ts",
  "type": "module",
  "corepack": {

@@ -30,6 +30,8 @@
    "// GraphQL Codegen": "",
    "codegen": "graphql-codegen --config codegen.ts",
    "codegen:watch": "graphql-codegen --config codegen.ts --watch",
    "// Internationalization": "",
    "i18n:extract": "node ./scripts/extract-translations.mjs",
    "// Code Quality": "",
    "lint": "eslint --config .eslintrc.ts src/",
    "lint:fix": "eslint --fix --config .eslintrc.ts src/",

@@ -114,6 +116,7 @@
    "graphql-subscriptions": "3.0.0",
    "graphql-tag": "2.12.6",
    "graphql-ws": "6.0.6",
    "html-entities": "^2.6.0",
    "ini": "5.0.0",
    "ip": "2.0.1",
    "jose": "6.0.13",
162 api/scripts/extract-translations.mjs Normal file
@@ -0,0 +1,162 @@
#!/usr/bin/env node

import { readFile, writeFile } from 'node:fs/promises';
import path from 'node:path';

import { glob } from 'glob';
import ts from 'typescript';

const projectRoot = process.cwd();
const sourcePatterns = 'src/**/*.{ts,js}';
const ignorePatterns = [
    '**/__tests__/**',
    '**/__test__/**',
    '**/*.spec.ts',
    '**/*.spec.js',
    '**/*.test.ts',
    '**/*.test.js',
];

const englishLocaleFile = path.resolve(projectRoot, 'src/i18n/en.json');

const identifierTargets = new Set(['t', 'translate']);
const propertyTargets = new Set([
    'i18n.t',
    'i18n.translate',
    'ctx.t',
    'this.translate',
    'this.i18n.translate',
    'this.i18n.t',
]);

function getPropertyChain(node) {
    if (ts.isIdentifier(node)) {
        return node.text;
    }
    if (ts.isPropertyAccessExpression(node)) {
        const left = getPropertyChain(node.expression);
        if (!left) return undefined;
        return `${left}.${node.name.text}`;
    }
    return undefined;
}

function extractLiteral(node) {
    if (ts.isStringLiteralLike(node)) {
        return node.text;
    }
    if (ts.isNoSubstitutionTemplateLiteral(node)) {
        return node.text;
    }
    return undefined;
}

function collectKeysFromSource(sourceFile) {
    const keys = new Set();

    function visit(node) {
        if (ts.isCallExpression(node)) {
            const expr = node.expression;
            let matches = false;

            if (ts.isIdentifier(expr) && identifierTargets.has(expr.text)) {
                matches = true;
            } else if (ts.isPropertyAccessExpression(expr)) {
                const chain = getPropertyChain(expr);
                if (chain && propertyTargets.has(chain)) {
                    matches = true;
                }
            }

            if (matches) {
                const [firstArg] = node.arguments;
                if (firstArg) {
                    const literal = extractLiteral(firstArg);
                    if (literal) {
                        keys.add(literal);
                    }
                }
            }
        }

        ts.forEachChild(node, visit);
    }

    visit(sourceFile);
    return keys;
}

async function loadEnglishCatalog() {
    try {
        const raw = await readFile(englishLocaleFile, 'utf8');
        const parsed = raw.trim() ? JSON.parse(raw) : {};
        if (typeof parsed !== 'object' || Array.isArray(parsed)) {
            throw new Error('English locale file must contain a JSON object.');
        }
        return parsed;
    } catch (error) {
        if (error && error.code === 'ENOENT') {
            return {};
        }
        throw error;
    }
}

async function ensureEnglishCatalog(keys) {
    const existingCatalog = await loadEnglishCatalog();
    const existingKeys = new Set(Object.keys(existingCatalog));

    let added = 0;
    const combinedKeys = new Set([...existingKeys, ...keys]);
    const sortedKeys = Array.from(combinedKeys).sort((a, b) => a.localeCompare(b));
    const nextCatalog = {};

    for (const key of sortedKeys) {
        if (Object.prototype.hasOwnProperty.call(existingCatalog, key)) {
            nextCatalog[key] = existingCatalog[key];
        } else {
            nextCatalog[key] = key;
            added += 1;
        }
    }

    const nextJson = `${JSON.stringify(nextCatalog, null, 2)}\n`;
    const existingJson = JSON.stringify(existingCatalog, null, 2) + '\n';

    if (nextJson !== existingJson) {
        await writeFile(englishLocaleFile, nextJson, 'utf8');
    }

    return added;
}

async function main() {
    const files = await glob(sourcePatterns, {
        cwd: projectRoot,
        ignore: ignorePatterns,
        absolute: true,
    });

    const collectedKeys = new Set();

    await Promise.all(
        files.map(async (file) => {
            const content = await readFile(file, 'utf8');
            const sourceFile = ts.createSourceFile(file, content, ts.ScriptTarget.Latest, true);
            const keys = collectKeysFromSource(sourceFile);
            keys.forEach((key) => collectedKeys.add(key));
        }),
    );

    const added = await ensureEnglishCatalog(collectedKeys);

    if (added === 0) {
        console.log('[i18n] No new backend translation keys detected.');
    } else {
        console.log(`[i18n] Added ${added} key(s) to src/i18n/en.json.`);
    }
}

main().catch((error) => {
    console.error('[i18n] Failed to extract backend translations.', error);
    process.exitCode = 1;
});
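For a sense of what the extractor above matches: it walks each source file's AST and collects the first string-literal argument of bare `t()`/`translate()` calls and of the property chains in `propertyTargets`. An illustrative snippet (the keys are made up; declarations are stubs so the example stands alone):

```ts
// Stubs so the snippet compiles on its own; in the API these come from
// the i18n layer.
declare function t(key: string): string;
declare const i18n: { translate(key: string): string };

t('Array is stopped');                // matched: bare identifier "t"
i18n.translate('Disk 1 overheating'); // matched: chain "i18n.translate"

// After `pnpm --filter @unraid/api i18n:extract`, src/i18n/en.json gains
// each new key with its English source text as the value, e.g.
// { "Array is stopped": "Array is stopped", ... }
```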
@@ -4,23 +4,18 @@ import {
    getBannerPathIfPresent,
    getCasePathIfPresent,
} from '@app/core/utils/images/image-file-helpers.js';
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
import { store } from '@app/store/index.js';
import { loadDynamixConfig } from '@app/store/index.js';

test('get case path returns expected result', async () => {
    await expect(getCasePathIfPresent()).resolves.toContain('/dev/dynamix/case-model.png');
});

test('get banner path returns null (state unloaded)', async () => {
    await expect(getBannerPathIfPresent()).resolves.toMatchInlineSnapshot('null');
});

test('get banner path returns the banner (state loaded)', async () => {
    await store.dispatch(loadDynamixConfigFile()).unwrap();
    loadDynamixConfig();
    await expect(getBannerPathIfPresent()).resolves.toContain('/dev/dynamix/banner.png');
});

test('get banner path returns null when no banner (state loaded)', async () => {
    await store.dispatch(loadDynamixConfigFile()).unwrap();
    loadDynamixConfig();
    await expect(getBannerPathIfPresent('notabanner.png')).resolves.toMatchInlineSnapshot('null');
});

151 api/src/__test__/store/watch/registration-watch.test.ts Normal file
@@ -0,0 +1,151 @@
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { StateFileKey } from '@app/store/types.js';
import { RegistrationType } from '@app/unraid-api/graph/resolvers/registration/registration.model.js';

// Mock the store module
vi.mock('@app/store/index.js', () => ({
    store: {
        dispatch: vi.fn(),
    },
    getters: {
        emhttp: vi.fn(),
    },
}));

// Mock the emhttp module
vi.mock('@app/store/modules/emhttp.js', () => ({
    loadSingleStateFile: vi.fn((key) => ({ type: 'emhttp/load-single-state-file', payload: key })),
}));

// Mock the registration module
vi.mock('@app/store/modules/registration.js', () => ({
    loadRegistrationKey: vi.fn(() => ({ type: 'registration/load-registration-key' })),
}));

// Mock the logger
vi.mock('@app/core/log.js', () => ({
    keyServerLogger: {
        info: vi.fn(),
        debug: vi.fn(),
    },
}));

describe('reloadVarIniWithRetry', () => {
    let store: { dispatch: ReturnType<typeof vi.fn> };
    let getters: { emhttp: ReturnType<typeof vi.fn> };
    let loadSingleStateFile: ReturnType<typeof vi.fn>;

    beforeEach(async () => {
        vi.useFakeTimers();

        const storeModule = await import('@app/store/index.js');
        const emhttpModule = await import('@app/store/modules/emhttp.js');

        store = storeModule.store as unknown as typeof store;
        getters = storeModule.getters as unknown as typeof getters;
        loadSingleStateFile = emhttpModule.loadSingleStateFile as unknown as typeof loadSingleStateFile;

        vi.clearAllMocks();
    });

    afterEach(() => {
        vi.useRealTimers();
    });

    it('returns early when registration state changes on first retry', async () => {
        // Initial state is TRIAL
        getters.emhttp
            .mockReturnValueOnce({ var: { regTy: RegistrationType.TRIAL } }) // First call (beforeState)
            .mockReturnValueOnce({ var: { regTy: RegistrationType.UNLEASHED } }); // After first reload

        const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');

        const promise = reloadVarIniWithRetry();

        // Advance past the first delay (500ms)
        await vi.advanceTimersByTimeAsync(500);
        await promise;

        // Should only dispatch once since state changed
        expect(store.dispatch).toHaveBeenCalledTimes(1);
        expect(loadSingleStateFile).toHaveBeenCalledWith(StateFileKey.var);
    });

    it('retries up to maxRetries when state does not change', async () => {
        // State never changes
        getters.emhttp.mockReturnValue({ var: { regTy: RegistrationType.TRIAL } });

        const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');

        const promise = reloadVarIniWithRetry(3);

        // Advance through all retries: 500ms, 1000ms, 2000ms
        await vi.advanceTimersByTimeAsync(500);
        await vi.advanceTimersByTimeAsync(1000);
        await vi.advanceTimersByTimeAsync(2000);
        await promise;

        // Should dispatch 3 times (maxRetries)
        expect(store.dispatch).toHaveBeenCalledTimes(3);
    });

    it('stops retrying when state changes on second attempt', async () => {
        getters.emhttp
            .mockReturnValueOnce({ var: { regTy: RegistrationType.TRIAL } }) // beforeState
            .mockReturnValueOnce({ var: { regTy: RegistrationType.TRIAL } }) // After first reload (no change)
            .mockReturnValueOnce({ var: { regTy: RegistrationType.UNLEASHED } }); // After second reload (changed!)

        const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');

        const promise = reloadVarIniWithRetry(3);

        // First retry
        await vi.advanceTimersByTimeAsync(500);
        // Second retry
        await vi.advanceTimersByTimeAsync(1000);
        await promise;

        // Should dispatch twice - stopped after state changed
        expect(store.dispatch).toHaveBeenCalledTimes(2);
    });

    it('handles undefined regTy gracefully', async () => {
        getters.emhttp.mockReturnValue({ var: {} });

        const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');

        const promise = reloadVarIniWithRetry(1);

        await vi.advanceTimersByTimeAsync(500);
        await promise;

        // Should still dispatch even with undefined regTy
        expect(store.dispatch).toHaveBeenCalledTimes(1);
    });

    it('uses exponential backoff delays', async () => {
        getters.emhttp.mockReturnValue({ var: { regTy: RegistrationType.TRIAL } });

        const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');

        const promise = reloadVarIniWithRetry(3);

        // At 0ms, no dispatch yet
        expect(store.dispatch).toHaveBeenCalledTimes(0);

        // At 500ms, first dispatch
        await vi.advanceTimersByTimeAsync(500);
        expect(store.dispatch).toHaveBeenCalledTimes(1);

        // At 1500ms (500 + 1000), second dispatch
        await vi.advanceTimersByTimeAsync(1000);
        expect(store.dispatch).toHaveBeenCalledTimes(2);

        // At 3500ms (500 + 1000 + 2000), third dispatch
        await vi.advanceTimersByTimeAsync(2000);
        expect(store.dispatch).toHaveBeenCalledTimes(3);

        await promise;
    });
});

12 api/src/connect-plugin-cleanup.ts Normal file
@@ -0,0 +1,12 @@
import { existsSync } from 'node:fs';

/**
 * Local filesystem and env checks stay synchronous so we can branch at module load.
 * @returns True if the Connect Unraid plugin is installed, false otherwise.
 */
export const isConnectPluginInstalled = () => {
    if (process.env.SKIP_CONNECT_PLUGIN_CHECK === 'true') {
        return true;
    }
    return existsSync('/boot/config/plugins/dynamix.unraid.net.plg');
};
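Since the check is synchronous, consumers can branch before any async bootstrap work starts. A minimal sketch (the consuming module and its branch are hypothetical):

import { isConnectPluginInstalled } from '@app/connect-plugin-cleanup.js';

// Runs at module load; no await needed before deciding the code path.
if (!isConnectPluginInstalled()) {
    // e.g. skip Connect-specific cleanup or route registration
}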

66 api/src/core/utils/__test__/safe-mode.test.ts Normal file
@@ -0,0 +1,66 @@
import { afterEach, describe, expect, it, vi } from 'vitest';

import { isSafeModeEnabled } from '@app/core/utils/safe-mode.js';
import { store } from '@app/store/index.js';
import * as stateFileLoader from '@app/store/services/state-file-loader.js';

describe('isSafeModeEnabled', () => {
    afterEach(() => {
        vi.restoreAllMocks();
    });

    it('returns the safe mode flag already present in the store', () => {
        const baseState = store.getState();
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            emhttp: {
                ...baseState.emhttp,
                var: {
                    ...(baseState.emhttp?.var ?? {}),
                    safeMode: true,
                },
            },
        });
        const loaderSpy = vi.spyOn(stateFileLoader, 'loadStateFileSync');

        expect(isSafeModeEnabled()).toBe(true);
        expect(loaderSpy).not.toHaveBeenCalled();
    });

    it('falls back to the synchronous loader when store state is missing', () => {
        const baseState = store.getState();
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            emhttp: {
                ...baseState.emhttp,
                var: {
                    ...(baseState.emhttp?.var ?? {}),
                    safeMode: undefined as unknown as boolean,
                } as typeof baseState.emhttp.var,
            } as typeof baseState.emhttp,
        } as typeof baseState);
        vi.spyOn(stateFileLoader, 'loadStateFileSync').mockReturnValue({
            ...(baseState.emhttp?.var ?? {}),
            safeMode: true,
        } as any);

        expect(isSafeModeEnabled()).toBe(true);
    });

    it('defaults to false when loader cannot provide state', () => {
        const baseState = store.getState();
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            emhttp: {
                ...baseState.emhttp,
                var: {
                    ...(baseState.emhttp?.var ?? {}),
                    safeMode: undefined as unknown as boolean,
                } as typeof baseState.emhttp.var,
            } as typeof baseState.emhttp,
        } as typeof baseState);
        vi.spyOn(stateFileLoader, 'loadStateFileSync').mockReturnValue(null);

        expect(isSafeModeEnabled()).toBe(false);
    });
});

231 api/src/core/utils/misc/__test__/timeout-budget.test.ts Normal file
@@ -0,0 +1,231 @@
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { TimeoutBudget } from '@app/core/utils/misc/timeout-budget.js';

describe('TimeoutBudget', () => {
    beforeEach(() => {
        vi.useFakeTimers();
    });

    afterEach(() => {
        vi.useRealTimers();
    });

    describe('constructor', () => {
        it('initializes with the given budget', () => {
            const budget = new TimeoutBudget(10000);
            expect(budget.remaining()).toBe(10000);
            expect(budget.elapsed()).toBe(0);
        });
    });

    describe('remaining', () => {
        it('returns full budget immediately after construction', () => {
            const budget = new TimeoutBudget(5000);
            expect(budget.remaining()).toBe(5000);
        });

        it('decreases as time passes', () => {
            const budget = new TimeoutBudget(5000);

            vi.advanceTimersByTime(1000);
            expect(budget.remaining()).toBe(4000);

            vi.advanceTimersByTime(2000);
            expect(budget.remaining()).toBe(2000);
        });

        it('never returns negative values', () => {
            const budget = new TimeoutBudget(1000);

            vi.advanceTimersByTime(5000); // Well past the budget
            expect(budget.remaining()).toBe(0);
        });

        it('returns zero when budget is exactly exhausted', () => {
            const budget = new TimeoutBudget(1000);

            vi.advanceTimersByTime(1000);
            expect(budget.remaining()).toBe(0);
        });
    });

    describe('elapsed', () => {
        it('returns zero immediately after construction', () => {
            const budget = new TimeoutBudget(5000);
            expect(budget.elapsed()).toBe(0);
        });

        it('increases as time passes', () => {
            const budget = new TimeoutBudget(5000);

            vi.advanceTimersByTime(1000);
            expect(budget.elapsed()).toBe(1000);

            vi.advanceTimersByTime(500);
            expect(budget.elapsed()).toBe(1500);
        });

        it('continues increasing past the budget limit', () => {
            const budget = new TimeoutBudget(1000);

            vi.advanceTimersByTime(2000);
            expect(budget.elapsed()).toBe(2000);
        });
    });

    describe('getTimeout', () => {
        it('returns maxMs when plenty of budget remains', () => {
            const budget = new TimeoutBudget(10000);
            expect(budget.getTimeout(2000)).toBe(2000);
        });

        it('returns maxMs when budget minus reserve is sufficient', () => {
            const budget = new TimeoutBudget(10000);
            expect(budget.getTimeout(2000, 5000)).toBe(2000);
        });

        it('caps timeout to available budget minus reserve', () => {
            const budget = new TimeoutBudget(10000);
            vi.advanceTimersByTime(5000); // 5000ms remaining

            // Want 2000ms but reserve 4000ms, only 1000ms available
            expect(budget.getTimeout(2000, 4000)).toBe(1000);
        });

        it('caps timeout to remaining budget when no reserve', () => {
            const budget = new TimeoutBudget(1000);
            vi.advanceTimersByTime(800); // 200ms remaining

            expect(budget.getTimeout(500)).toBe(200);
        });

        it('returns minimum of 100ms even when budget is exhausted', () => {
            const budget = new TimeoutBudget(1000);
            vi.advanceTimersByTime(2000); // Budget exhausted

            expect(budget.getTimeout(500)).toBe(100);
        });

        it('returns minimum of 100ms when reserve exceeds remaining', () => {
            const budget = new TimeoutBudget(5000);
            vi.advanceTimersByTime(4000); // 1000ms remaining

            // Reserve 2000ms but only 1000ms remaining
            expect(budget.getTimeout(500, 2000)).toBe(100);
        });

        it('uses default reserve of 0 when not specified', () => {
            const budget = new TimeoutBudget(1000);
            vi.advanceTimersByTime(500); // 500ms remaining

            expect(budget.getTimeout(1000)).toBe(500); // Capped to remaining
        });
    });

    describe('hasTimeFor', () => {
        it('returns true when enough time remains', () => {
            const budget = new TimeoutBudget(5000);
            expect(budget.hasTimeFor(3000)).toBe(true);
        });

        it('returns true when exactly enough time remains', () => {
            const budget = new TimeoutBudget(5000);
            expect(budget.hasTimeFor(5000)).toBe(true);
        });

        it('returns false when not enough time remains', () => {
            const budget = new TimeoutBudget(5000);
            expect(budget.hasTimeFor(6000)).toBe(false);
        });

        it('accounts for elapsed time', () => {
            const budget = new TimeoutBudget(5000);
            vi.advanceTimersByTime(3000); // 2000ms remaining

            expect(budget.hasTimeFor(2000)).toBe(true);
            expect(budget.hasTimeFor(3000)).toBe(false);
        });

        it('returns false when budget is exhausted', () => {
            const budget = new TimeoutBudget(1000);
            vi.advanceTimersByTime(2000);

            expect(budget.hasTimeFor(1)).toBe(false);
        });

        it('returns true for zero required time', () => {
            const budget = new TimeoutBudget(1000);
            vi.advanceTimersByTime(2000); // Budget exhausted

            expect(budget.hasTimeFor(0)).toBe(true);
        });
    });

    describe('integration scenarios', () => {
        it('simulates a typical startup sequence', () => {
            const budget = new TimeoutBudget(13000); // 13 second budget
            const BOOTSTRAP_RESERVE = 8000;
            const MAX_OP_TIMEOUT = 2000;

            // First operation - should get full 2000ms
            const op1Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            expect(op1Timeout).toBe(2000);

            // Simulate operation taking 500ms
            vi.advanceTimersByTime(500);

            // Second operation - still have plenty of budget
            const op2Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            expect(op2Timeout).toBe(2000);

            // Simulate operation taking 1000ms
            vi.advanceTimersByTime(1000);

            // Third operation
            const op3Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            expect(op3Timeout).toBe(2000);

            // Simulate slow operation taking 2000ms
            vi.advanceTimersByTime(2000);

            // Now 3500ms elapsed, 9500ms remaining
            // After reserve, only 1500ms available - less than max
            const op4Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            expect(op4Timeout).toBe(1500);

            // Simulate operation completing
            vi.advanceTimersByTime(1000);

            // Bootstrap phase - use all remaining time
            const bootstrapTimeout = budget.remaining();
            expect(bootstrapTimeout).toBe(8500);
            expect(budget.hasTimeFor(8000)).toBe(true);
        });

        it('handles worst-case scenario where all operations timeout', () => {
            const budget = new TimeoutBudget(13000);
            const BOOTSTRAP_RESERVE = 8000;
            const MAX_OP_TIMEOUT = 2000;

            // Each operation times out at its limit
            // Available for operations: 13000 - 8000 = 5000ms

            // Op 1: gets 2000ms, times out
            budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            vi.advanceTimersByTime(2000);

            // Op 2: gets 2000ms, times out
            budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            vi.advanceTimersByTime(2000);

            // Op 3: only 1000ms available (5000 - 4000), times out
            const op3Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
            expect(op3Timeout).toBe(1000);
            vi.advanceTimersByTime(1000);

            // Bootstrap: should still have 8000ms
            expect(budget.remaining()).toBe(8000);
        });
    });
});

65 api/src/core/utils/misc/__test__/with-timeout.test.ts Normal file
@@ -0,0 +1,65 @@
import { describe, expect, it } from 'vitest';

import { withTimeout } from '@app/core/utils/misc/with-timeout.js';

describe('withTimeout', () => {
    it('resolves when promise completes before timeout', async () => {
        const promise = Promise.resolve('success');
        const result = await withTimeout(promise, 1000, 'testOp');
        expect(result).toBe('success');
    });

    it('resolves with correct value for delayed promise within timeout', async () => {
        const promise = new Promise<number>((resolve) => setTimeout(() => resolve(42), 50));
        const result = await withTimeout(promise, 1000, 'testOp');
        expect(result).toBe(42);
    });

    it('rejects when promise takes longer than timeout', async () => {
        const promise = new Promise<string>((resolve) => setTimeout(() => resolve('late'), 500));
        await expect(withTimeout(promise, 50, 'slowOp')).rejects.toThrow('slowOp timed out after 50ms');
    });

    it('includes operation name in timeout error message', async () => {
        const promise = new Promise<void>(() => {}); // Never resolves
        await expect(withTimeout(promise, 10, 'myCustomOperation')).rejects.toThrow(
            'myCustomOperation timed out after 10ms'
        );
    });

    it('propagates rejection from the original promise', async () => {
        const promise = Promise.reject(new Error('original error'));
        await expect(withTimeout(promise, 1000, 'testOp')).rejects.toThrow('original error');
    });

    it('resolves immediately for already-resolved promises', async () => {
        const promise = Promise.resolve('immediate');
        const start = Date.now();
        const result = await withTimeout(promise, 1000, 'testOp');
        const elapsed = Date.now() - start;

        expect(result).toBe('immediate');
        expect(elapsed).toBeLessThan(50); // Should be nearly instant
    });

    it('works with zero timeout (immediately times out for pending promises)', async () => {
        const promise = new Promise<void>(() => {}); // Never resolves
        await expect(withTimeout(promise, 0, 'zeroTimeout')).rejects.toThrow(
            'zeroTimeout timed out after 0ms'
        );
    });

    it('preserves the type of the resolved value', async () => {
        interface TestType {
            id: number;
            name: string;
        }
        const testObj: TestType = { id: 1, name: 'test' };
        const promise = Promise.resolve(testObj);

        const result = await withTimeout(promise, 1000, 'testOp');

        expect(result.id).toBe(1);
        expect(result.name).toBe('test');
    });
});

70 api/src/core/utils/misc/timeout-budget.ts Normal file
@@ -0,0 +1,70 @@
/**
 * Tracks remaining time budget to ensure we don't exceed external timeouts (e.g., PM2's listen_timeout).
 *
 * This class helps coordinate multiple async operations by:
 * - Tracking elapsed time from construction
 * - Calculating dynamic timeouts based on remaining budget
 * - Reserving time for critical operations (like server bootstrap)
 *
 * @example
 * ```typescript
 * const budget = new TimeoutBudget(15000); // 15 second total budget
 *
 * // Each operation gets a timeout capped by remaining budget
 * await withTimeout(loadConfig(), budget.getTimeout(2000, 8000), 'loadConfig');
 * await withTimeout(loadState(), budget.getTimeout(2000, 8000), 'loadState');
 *
 * // Bootstrap gets all remaining time
 * await withTimeout(bootstrap(), budget.remaining(), 'bootstrap');
 *
 * console.log(`Completed in ${budget.elapsed()}ms`);
 * ```
 */
export class TimeoutBudget {
    private startTime: number;
    private budgetMs: number;

    /**
     * Creates a new startup budget tracker.
     * @param budgetMs Total time budget in milliseconds
     */
    constructor(budgetMs: number) {
        this.startTime = Date.now();
        this.budgetMs = budgetMs;
    }

    /**
     * Returns remaining time in milliseconds.
     * Never returns negative values.
     */
    remaining(): number {
        return Math.max(0, this.budgetMs - (Date.now() - this.startTime));
    }

    /**
     * Returns elapsed time in milliseconds since construction.
     */
    elapsed(): number {
        return Date.now() - this.startTime;
    }

    /**
     * Returns timeout for an operation, capped by remaining budget.
     *
     * @param maxMs Maximum timeout for this operation
     * @param reserveMs Time to reserve for future operations (e.g., server bootstrap)
     * @returns Timeout in milliseconds (minimum 100ms to avoid instant failures)
     */
    getTimeout(maxMs: number, reserveMs: number = 0): number {
        const available = this.remaining() - reserveMs;
        return Math.max(100, Math.min(maxMs, available));
    }

    /**
     * Checks if there's enough time remaining for an operation.
     * @param requiredMs Time required in milliseconds
     */
    hasTimeFor(requiredMs: number): boolean {
        return this.remaining() >= requiredMs;
    }
}

25 api/src/core/utils/misc/with-timeout.ts Normal file
@@ -0,0 +1,25 @@
/**
 * Wraps a promise with a timeout to prevent hangs.
 * If the operation takes longer than timeoutMs, it rejects with a timeout error.
 *
 * @param promise The promise to wrap with a timeout
 * @param timeoutMs Maximum time in milliseconds before timing out
 * @param operationName Name of the operation for the error message
 * @returns The result of the promise if it completes in time
 * @throws Error if the operation times out
 */
export const withTimeout = <T>(
    promise: Promise<T>,
    timeoutMs: number,
    operationName: string
): Promise<T> => {
    return Promise.race([
        promise,
        new Promise<never>((_, reject) =>
            setTimeout(
                () => reject(new Error(`${operationName} timed out after ${timeoutMs}ms`)),
                timeoutMs
            )
        ),
    ]);
};
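One caveat: Promise.race only stops waiting; the losing promise keeps running in the background, so a timeout here means "gave up waiting", not "cancelled". A minimal sketch of pairing withTimeout with TimeoutBudget, mirroring the startup flow in api/src/index.ts further below (someAsyncOp is a hypothetical operation):

import { TimeoutBudget } from '@app/core/utils/misc/timeout-budget.js';
import { withTimeout } from '@app/core/utils/misc/with-timeout.js';

const budget = new TimeoutBudget(13_000);
// Cap this step at 2s while keeping 8s in reserve for the bootstrap phase.
const result = await withTimeout(someAsyncOp(), budget.getTimeout(2_000, 8_000), 'someAsyncOp');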

17 api/src/core/utils/safe-mode.ts Normal file
@@ -0,0 +1,17 @@
import { store } from '@app/store/index.js';
import { loadStateFileSync } from '@app/store/services/state-file-loader.js';
import { StateFileKey } from '@app/store/types.js';

export const isSafeModeEnabled = (): boolean => {
    const safeModeFromStore = store.getState().emhttp?.var?.safeMode;
    if (typeof safeModeFromStore === 'boolean') {
        return safeModeFromStore;
    }

    const varState = loadStateFileSync(StateFileKey.var);
    if (varState) {
        return Boolean(varState.safeMode);
    }

    return false;
};

1 api/src/i18n/ar.json Normal file
{}
1 api/src/i18n/bn.json Normal file
{}
1 api/src/i18n/ca.json Normal file
{}
1 api/src/i18n/cs.json Normal file
{}
1 api/src/i18n/da.json Normal file
{}
1 api/src/i18n/de.json Normal file
{}
1 api/src/i18n/en.json Normal file
{}
1 api/src/i18n/es.json Normal file
{}
1 api/src/i18n/fr.json Normal file
{}
1 api/src/i18n/hi.json Normal file
{}
1 api/src/i18n/hr.json Normal file
{}
1 api/src/i18n/hu.json Normal file
{}
1 api/src/i18n/it.json Normal file
{}
1 api/src/i18n/ja.json Normal file
{}
1 api/src/i18n/ko.json Normal file
{}
1 api/src/i18n/lv.json Normal file
{}
1 api/src/i18n/nl.json Normal file
{}
1 api/src/i18n/no.json Normal file
{}
1 api/src/i18n/pl.json Normal file
{}
1 api/src/i18n/pt.json Normal file
{}
1 api/src/i18n/ro.json Normal file
{}
1 api/src/i18n/ru.json Normal file
{}
1 api/src/i18n/sv.json Normal file
{}
1 api/src/i18n/uk.json Normal file
{}
1 api/src/i18n/zh.json Normal file
{}

113 api/src/index.ts
@@ -15,28 +15,38 @@ import { WebSocket } from 'ws';

import { logger } from '@app/core/log.js';
import { fileExistsSync } from '@app/core/utils/files/file-exists.js';
import { TimeoutBudget } from '@app/core/utils/misc/timeout-budget.js';
import { withTimeout } from '@app/core/utils/misc/with-timeout.js';
import { getServerIdentifier } from '@app/core/utils/server-identifier.js';
import { environment, PATHS_CONFIG_MODULES, PORT } from '@app/environment.js';
import * as envVars from '@app/environment.js';
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
import { shutdownApiEvent } from '@app/store/actions/shutdown-api-event.js';
import { store } from '@app/store/index.js';
import { loadDynamixConfig, store } from '@app/store/index.js';
import { startMiddlewareListeners } from '@app/store/listeners/listener-middleware.js';
import { loadStateFiles } from '@app/store/modules/emhttp.js';
import { loadRegistrationKey } from '@app/store/modules/registration.js';
import { setupDynamixConfigWatch } from '@app/store/watch/dynamix-config-watch.js';
import { setupRegistrationKeyWatch } from '@app/store/watch/registration-watch.js';
import { StateManager } from '@app/store/watch/state-watch.js';

let server: NestFastifyApplication<RawServerDefault> | null = null;

// PM2 listen_timeout is 15 seconds (ecosystem.config.json)
// We use 13 seconds as our total budget to ensure our timeout triggers before PM2 kills us
const TOTAL_STARTUP_BUDGET_MS = 13_000;
// Reserve time for the NestJS bootstrap (the most critical and time-consuming operation)
const BOOTSTRAP_RESERVED_MS = 8_000;
// Maximum time for any single pre-bootstrap operation
const MAX_OPERATION_TIMEOUT_MS = 2_000;

const unlinkUnixPort = () => {
    if (isNaN(parseInt(PORT, 10))) {
        if (fileExistsSync(PORT)) unlinkSync(PORT);
    }
};

export const viteNodeApp = async () => {
export const viteNodeApp = async (): Promise<NestFastifyApplication<RawServerDefault>> => {
    const budget = new TimeoutBudget(TOTAL_STARTUP_BUDGET_MS);

    try {
        await import('json-bigint-patch');
        environment.IS_MAIN_PROCESS = true;
@@ -44,15 +54,15 @@ export const viteNodeApp = async () => {
        /**------------------------------------------------------------------------
         * Attaching getServerIdentifier to globalThis
         *
         * getServerIdentifier is tightly coupled to the deprecated redux store,
         * which we don't want to share with other packages or plugins.
         *
         * At the same time, we need to use it in @unraid/shared as a building block,
         * where it's used & available outside of NestJS's DI context.
         *
         * Attaching to globalThis is a temporary solution to avoid refactoring
         * config sync & management outside of NestJS's DI context.
         *
         * Plugin authors should import getServerIdentifier from @unraid/shared instead,
         * to avoid breaking changes to their code.
         *------------------------------------------------------------------------**/
@@ -60,7 +70,18 @@ export const viteNodeApp = async () => {
        logger.info('ENV %o', envVars);
        logger.info('PATHS %o', store.getState().paths);

        await mkdir(PATHS_CONFIG_MODULES, { recursive: true });
        // Note: we use logger.info for checkpoints instead of a lower log level
        // to ensure emission during an unraid server's boot,
        // where the log level will be set to INFO by default.

        // Create config directory
        try {
            await mkdir(PATHS_CONFIG_MODULES, { recursive: true });
            logger.info('Config directory ready');
        } catch (error) {
            logger.error(error, 'Failed to create config directory');
            throw error;
        }

        const cacheable = new CacheableLookup();

@@ -70,32 +91,73 @@ export const viteNodeApp = async () => {
        cacheable.install(https.globalAgent);

        // Load emhttp state into store
        await store.dispatch(loadStateFiles());
        try {
            const timeout = budget.getTimeout(MAX_OPERATION_TIMEOUT_MS, BOOTSTRAP_RESERVED_MS);
            await withTimeout(store.dispatch(loadStateFiles()), timeout, 'loadStateFiles');
            logger.info('Emhttp state loaded');
        } catch (error) {
            logger.error(error, 'Failed to load emhttp state files');
            logger.warn('Continuing with default state');
        }

        // Load initial registration key into store
        await store.dispatch(loadRegistrationKey());
        try {
            const timeout = budget.getTimeout(MAX_OPERATION_TIMEOUT_MS, BOOTSTRAP_RESERVED_MS);
            await withTimeout(store.dispatch(loadRegistrationKey()), timeout, 'loadRegistrationKey');
            logger.info('Registration key loaded');
        } catch (error) {
            logger.error(error, 'Failed to load registration key');
            logger.warn('Continuing without registration key');
        }

        // Load my dynamix config file into store
        await store.dispatch(loadDynamixConfigFile());
        try {
            loadDynamixConfig();
            logger.info('Dynamix config loaded');
        } catch (error) {
            logger.error(error, 'Failed to load dynamix config');
            logger.warn('Continuing with default dynamix config');
        }

        // Start listening to file updates
        StateManager.getInstance();
        try {
            StateManager.getInstance();
            logger.info('State manager initialized');
        } catch (error) {
            logger.error(error, 'Failed to initialize state manager');
            logger.warn('Continuing without state watching');
        }

        // Start listening to key file changes
        setupRegistrationKeyWatch();

        // Start listening to dynamix config file changes
        setupDynamixConfigWatch();
        try {
            setupRegistrationKeyWatch();
            logger.info('Registration key watch active');
        } catch (error) {
            logger.error(error, 'Failed to setup registration key watch');
            logger.warn('Continuing without key file watching');
        }

        // If port is unix socket, delete old socket before starting http server
        unlinkUnixPort();

        startMiddlewareListeners();

        // Start webserver
        const { bootstrapNestServer } = await import('@app/unraid-api/main.js');

        server = await bootstrapNestServer();
        // Start webserver - use all remaining budget
        try {
            const bootstrapTimeout = budget.remaining();
            if (bootstrapTimeout < 1000) {
                logger.warn(
                    `Insufficient startup budget remaining (${bootstrapTimeout}ms) for NestJS bootstrap`
                );
            }
            logger.info('Bootstrapping NestJS server (budget: %dms)...', bootstrapTimeout);
            const { bootstrapNestServer } = await import('@app/unraid-api/main.js');
            server = await withTimeout(bootstrapNestServer(), bootstrapTimeout, 'bootstrapNestServer');
            logger.info('Startup complete in %dms', budget.elapsed());
        } catch (error) {
            logger.error(error, 'Failed to start NestJS server');
            throw error; // This is critical - must rethrow to trigger graceful exit
        }

        asyncExitHook(
            async (signal) => {
@@ -108,8 +170,10 @@ export const viteNodeApp = async () => {

                gracefulExit();
            },
            { wait: 9999 }
            { wait: 10_000 }
        );

        return server;
    } catch (error: unknown) {
        if (error instanceof Error) {
            logger.error(error, 'API-ERROR');
@@ -120,8 +184,9 @@ export const viteNodeApp = async () => {
            await server?.close?.();
        }
        shutdownApiEvent();
        // Kill application
        // Kill application - gracefulExit calls process.exit but TS doesn't know it never returns
        gracefulExit(1);
        throw new Error('Unreachable');
    }
};
@@ -1,12 +1,9 @@
import { F_OK } from 'constants';
import { access } from 'fs/promises';

import { createAsyncThunk } from '@reduxjs/toolkit';
import { createTtlMemoizedLoader } from '@unraid/shared';

import type { RecursivePartial } from '@app/types/index.js';
import { type DynamixConfig } from '@app/core/types/ini.js';
import { fileExistsSync } from '@app/core/utils/files/file-exists.js';
import { parseConfig } from '@app/core/utils/misc/parse-config.js';
import { type RecursiveNullable, type RecursivePartial } from '@app/types/index.js';
import { batchProcess } from '@app/utils.js';

/**
 * Loads a configuration file from disk, parses it to a RecursivePartial of the provided type, and returns it.
@@ -16,11 +13,8 @@ import { batchProcess } from '@app/utils.js';
 * @param path The path to the configuration file on disk.
 * @returns A parsed RecursivePartial of the provided type.
 */
async function loadConfigFile<ConfigType>(path: string): Promise<RecursivePartial<ConfigType>> {
    const fileIsAccessible = await access(path, F_OK)
        .then(() => true)
        .catch(() => false);
    return fileIsAccessible
function loadConfigFileSync<ConfigType>(path: string): RecursivePartial<ConfigType> {
    return fileExistsSync(path)
        ? parseConfig<RecursivePartial<ConfigType>>({
              filePath: path,
              type: 'ini',
@@ -28,21 +22,40 @@ async function loadConfigFile<ConfigType>(path: string): Promise<RecursivePartia
        : {};
}

/**
 * Load the dynamix.cfg into the store.
 *
 * Note: If the file doesn't exist this will fallback to default values.
 */
export const loadDynamixConfigFile = createAsyncThunk<
    RecursiveNullable<RecursivePartial<DynamixConfig>>,
    string | undefined
>('config/load-dynamix-config-file', async (filePath) => {
    if (filePath) {
        return loadConfigFile<DynamixConfig>(filePath);
    }
    const store = await import('@app/store/index.js');
    const paths = store.getters.paths()['dynamix-config'];
    const { data: configs } = await batchProcess(paths, (path) => loadConfigFile<DynamixConfig>(path));
    const [defaultConfig = {}, customConfig = {}] = configs;
    return { ...defaultConfig, ...customConfig };
type ConfigPaths = readonly (string | undefined | null)[];
const CACHE_WINDOW_MS = 250;

const memoizedConfigLoader = createTtlMemoizedLoader<
    RecursivePartial<DynamixConfig>,
    ConfigPaths,
    string
>({
    ttlMs: CACHE_WINDOW_MS,
    getCacheKey: (configPaths: ConfigPaths): string => JSON.stringify(configPaths),
    load: (configPaths: ConfigPaths) => {
        const validPaths = configPaths.filter((path): path is string => Boolean(path));
        if (validPaths.length === 0) {
            return {};
        }
        const configFiles = validPaths.map((path) => loadConfigFileSync<DynamixConfig>(path));
        return configFiles.reduce<RecursivePartial<DynamixConfig>>(
            (accumulator, configFile) => ({
                ...accumulator,
                ...configFile,
            }),
            {}
        );
    },
});

/**
 * Loads dynamix config from disk with TTL caching.
 *
 * @param configPaths - Array of config file paths to load and merge
 * @returns Merged config object from all valid paths
 */
export const loadDynamixConfigFromDiskSync = (
    configPaths: readonly (string | undefined | null)[]
): RecursivePartial<DynamixConfig> => {
    return memoizedConfigLoader.get(configPaths);
};
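A minimal usage sketch for the memoized loader (the paths are hypothetical; later entries win the merge, and repeat calls inside the 250ms TTL window return the cached result):

const config = loadDynamixConfigFromDiskSync([
    '/path/to/default.cfg', // hypothetical defaults, merged first
    '/path/to/dynamix.cfg', // hypothetical user overrides, merged last
]);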
@@ -1,7 +1,11 @@
import { configureStore } from '@reduxjs/toolkit';

import { logger } from '@app/core/log.js';
import { loadDynamixConfigFromDiskSync } from '@app/store/actions/load-dynamix-config-file.js';
import { listenerMiddleware } from '@app/store/listeners/listener-middleware.js';
import { updateDynamixConfig } from '@app/store/modules/dynamix.js';
import { rootReducer } from '@app/store/root-reducer.js';
import { FileLoadStatus } from '@app/store/types.js';

export const store = configureStore({
    reducer: rootReducer,
@@ -15,8 +19,36 @@ export type RootState = ReturnType<typeof store.getState>;
export type AppDispatch = typeof store.dispatch;
export type ApiStore = typeof store;

// loadDynamixConfig is located here and not in the actions/load-dynamix-config-file.js file because it needs to access the store,
// and injecting it seemed circular and convoluted for this use case.
/**
 * Loads the dynamix config into the store.
 * Can be called multiple times - uses TTL caching internally.
 * @returns The loaded dynamix config.
 */
export const loadDynamixConfig = () => {
    const configPaths = store.getState().paths['dynamix-config'] ?? [];
    try {
        const config = loadDynamixConfigFromDiskSync(configPaths);
        store.dispatch(
            updateDynamixConfig({
                ...config,
                status: FileLoadStatus.LOADED,
            })
        );
    } catch (error) {
        logger.error(error, 'Failed to load dynamix config from disk');
        store.dispatch(
            updateDynamixConfig({
                status: FileLoadStatus.FAILED_LOADING,
            })
        );
    }
    return store.getState().dynamix;
};

export const getters = {
    dynamix: () => store.getState().dynamix,
    dynamix: () => loadDynamixConfig(),
    emhttp: () => store.getState().emhttp,
    paths: () => store.getState().paths,
    registration: () => store.getState().registration,
@@ -2,7 +2,6 @@ import type { PayloadAction } from '@reduxjs/toolkit';
import { createSlice } from '@reduxjs/toolkit';

import { type DynamixConfig } from '@app/core/types/ini.js';
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
import { FileLoadStatus } from '@app/store/types.js';
import { RecursivePartial } from '@app/types/index.js';

@@ -22,24 +21,6 @@ export const dynamix = createSlice({
            return Object.assign(state, action.payload);
        },
    },
    extraReducers(builder) {
        builder.addCase(loadDynamixConfigFile.pending, (state) => {
            state.status = FileLoadStatus.LOADING;
        });

        builder.addCase(loadDynamixConfigFile.fulfilled, (state, action) => {
            return {
                ...(action.payload as DynamixConfig),
                status: FileLoadStatus.LOADED,
            };
        });

        builder.addCase(loadDynamixConfigFile.rejected, (state, action) => {
            Object.assign(state, action.payload, {
                status: FileLoadStatus.FAILED_LOADING,
            });
        });
    },
});

export const { updateDynamixConfig } = dynamix.actions;
@@ -163,6 +163,18 @@ export const loadStateFiles = createAsyncThunk<
    return state;
});

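// Maps each state file key to its field on the emhttp slice; several differ
// (e.g. devs -> devices, network -> networks, sec -> smbShares, sec_nfs -> nfsShares).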
const stateFieldKeyMap: Record<StateFileKey, keyof SliceState> = {
    [StateFileKey.var]: 'var',
    [StateFileKey.devs]: 'devices',
    [StateFileKey.network]: 'networks',
    [StateFileKey.nginx]: 'nginx',
    [StateFileKey.shares]: 'shares',
    [StateFileKey.disks]: 'disks',
    [StateFileKey.users]: 'users',
    [StateFileKey.sec]: 'smbShares',
    [StateFileKey.sec_nfs]: 'nfsShares',
};

export const emhttp = createSlice({
    name: 'emhttp',
    initialState,
@@ -175,7 +187,8 @@ export const emhttp = createSlice({
        }>
        ) {
            const { field } = action.payload;
            return Object.assign(state, { [field]: action.payload.state });
            const targetField = stateFieldKeyMap[field] ?? (field as keyof SliceState);
            return Object.assign(state, { [targetField]: action.payload.state });
        },
    },
    extraReducers(builder) {

81 api/src/store/services/__test__/state-file-loader.test.ts Normal file
@@ -0,0 +1,81 @@
import { mkdtempSync, readFileSync, rmSync, writeFileSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { join } from 'node:path';

import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { store } from '@app/store/index.js';
import { loadStateFileSync } from '@app/store/services/state-file-loader.js';
import { StateFileKey } from '@app/store/types.js';

const VAR_FIXTURE = readFileSync(new URL('../../../../dev/states/var.ini', import.meta.url), 'utf-8');

const writeVarFixture = (dir: string, safeMode: 'yes' | 'no') => {
    const content = VAR_FIXTURE.replace(/safeMode="(yes|no)"/, `safeMode="${safeMode}"`);
    writeFileSync(join(dir, `${StateFileKey.var}.ini`), content);
};

describe('loadStateFileSync', () => {
    let tempDir: string;
    let baseState: ReturnType<typeof store.getState>;

    beforeEach(() => {
        tempDir = mkdtempSync(join(tmpdir(), 'state-file-'));
        baseState = store.getState();
    });

    afterEach(() => {
        vi.restoreAllMocks();
        rmSync(tempDir, { recursive: true, force: true });
    });

    it('loads var.ini, updates the store, and returns the parsed state', () => {
        writeVarFixture(tempDir, 'yes');
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            paths: {
                ...baseState.paths,
                states: tempDir,
            },
        });
        const dispatchSpy = vi.spyOn(store, 'dispatch').mockImplementation((action) => action as any);

        const result = loadStateFileSync(StateFileKey.var);

        expect(result?.safeMode).toBe(true);
        expect(dispatchSpy).toHaveBeenCalledWith(
            expect.objectContaining({
                type: 'emhttp/updateEmhttpState',
                payload: {
                    field: StateFileKey.var,
                    state: expect.objectContaining({ safeMode: true }),
                },
            })
        );
    });

    it('returns null when the states path is missing', () => {
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            paths: undefined,
        } as any);
        const dispatchSpy = vi.spyOn(store, 'dispatch');

        expect(loadStateFileSync(StateFileKey.var)).toBeNull();
        expect(dispatchSpy).not.toHaveBeenCalled();
    });

    it('returns null when the requested state file cannot be found', () => {
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            paths: {
                ...baseState.paths,
                states: tempDir,
            },
        });
        const dispatchSpy = vi.spyOn(store, 'dispatch');

        expect(loadStateFileSync(StateFileKey.var)).toBeNull();
        expect(dispatchSpy).not.toHaveBeenCalled();
    });
});

81 api/src/store/services/state-file-loader.ts Normal file
@@ -0,0 +1,81 @@
import { join } from 'node:path';

import type { SliceState } from '@app/store/modules/emhttp.js';
import type { StateFileToIniParserMap } from '@app/store/types.js';
import { parseConfig } from '@app/core/utils/misc/parse-config.js';
import { store } from '@app/store/index.js';
import { updateEmhttpState } from '@app/store/modules/emhttp.js';
import { parse as parseDevices } from '@app/store/state-parsers/devices.js';
import { parse as parseNetwork } from '@app/store/state-parsers/network.js';
import { parse as parseNfs } from '@app/store/state-parsers/nfs.js';
import { parse as parseNginx } from '@app/store/state-parsers/nginx.js';
import { parse as parseShares } from '@app/store/state-parsers/shares.js';
import { parse as parseSlots } from '@app/store/state-parsers/slots.js';
import { parse as parseSmb } from '@app/store/state-parsers/smb.js';
import { parse as parseUsers } from '@app/store/state-parsers/users.js';
import { parse as parseVar } from '@app/store/state-parsers/var.js';
import { StateFileKey } from '@app/store/types.js';

type ParserReturnMap = {
    [StateFileKey.var]: ReturnType<typeof parseVar>;
    [StateFileKey.devs]: ReturnType<typeof parseDevices>;
    [StateFileKey.network]: ReturnType<typeof parseNetwork>;
    [StateFileKey.nginx]: ReturnType<typeof parseNginx>;
    [StateFileKey.shares]: ReturnType<typeof parseShares>;
    [StateFileKey.disks]: ReturnType<typeof parseSlots>;
    [StateFileKey.users]: ReturnType<typeof parseUsers>;
    [StateFileKey.sec]: ReturnType<typeof parseSmb>;
    [StateFileKey.sec_nfs]: ReturnType<typeof parseNfs>;
};

const PARSER_MAP: { [K in StateFileKey]: StateFileToIniParserMap[K] } = {
    [StateFileKey.var]: parseVar,
    [StateFileKey.devs]: parseDevices,
    [StateFileKey.network]: parseNetwork,
    [StateFileKey.nginx]: parseNginx,
    [StateFileKey.shares]: parseShares,
    [StateFileKey.disks]: parseSlots,
    [StateFileKey.users]: parseUsers,
    [StateFileKey.sec]: parseSmb,
    [StateFileKey.sec_nfs]: parseNfs,
};

/**
 * Synchronously loads an emhttp state file, updates the Redux store slice, and returns the parsed state.
 *
 * Designed for bootstrap contexts (CLI, plugin loading, etc.) where dispatching the async thunks is
 * impractical but we still need authoritative emhttp state from disk.
 */
export const loadStateFileSync = <K extends StateFileKey>(
    stateFileKey: K
): ParserReturnMap[K] | null => {
    const state = store.getState();
    const statesDirectory = state.paths?.states;

    if (!statesDirectory) {
        return null;
    }

    const filePath = join(statesDirectory, `${stateFileKey}.ini`);

    try {
        const parser = PARSER_MAP[stateFileKey] as StateFileToIniParserMap[K];
        const rawConfig = parseConfig<Record<string, unknown>>({
            filePath,
            type: 'ini',
        });
        const config = rawConfig as Parameters<StateFileToIniParserMap[K]>[0];
        const parsed = (parser as (input: any) => ParserReturnMap[K])(config);

        store.dispatch(
            updateEmhttpState({
                field: stateFileKey,
                state: parsed as Partial<SliceState[keyof SliceState]>,
            })
        );

        return parsed;
    } catch (error) {
        return null;
    }
};
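A minimal usage sketch, matching how the safe-mode helper earlier in this change consumes the loader:

// Synchronously parse var.ini, hydrate the emhttp slice, and use the result directly.
const varState = loadStateFileSync(StateFileKey.var);
const safeMode = varState ? Boolean(varState.safeMode) : false;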
@@ -1,17 +0,0 @@
import { watch } from 'chokidar';

import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
import { getters, store } from '@app/store/index.js';

export const setupDynamixConfigWatch = () => {
    const configPath = getters.paths()?.['dynamix-config'];

    // Update store when cfg changes
    watch(configPath, {
        persistent: true,
        ignoreInitial: true,
    }).on('change', async () => {
        // Load updated dynamix config file into store
        await store.dispatch(loadDynamixConfigFile());
    });
};
@@ -1,17 +1,51 @@
import { watch } from 'chokidar';

import { CHOKIDAR_USEPOLLING } from '@app/environment.js';
import { store } from '@app/store/index.js';
import { keyServerLogger } from '@app/core/log.js';
import { getters, store } from '@app/store/index.js';
import { loadSingleStateFile } from '@app/store/modules/emhttp.js';
import { loadRegistrationKey } from '@app/store/modules/registration.js';
import { StateFileKey } from '@app/store/types.js';

/**
 * Reloads var.ini with retry logic to handle timing issues with emhttpd.
 * When a key file changes, emhttpd needs time to process it and update var.ini.
 * This function retries loading var.ini until the registration state changes
 * or max retries are exhausted.
 */
export const reloadVarIniWithRetry = async (maxRetries = 3): Promise<void> => {
    const beforeState = getters.emhttp().var?.regTy;

    for (let attempt = 0; attempt < maxRetries; attempt++) {
        const delay = 500 * Math.pow(2, attempt); // 500ms, 1s, 2s
        await new Promise((resolve) => setTimeout(resolve, delay));

        await store.dispatch(loadSingleStateFile(StateFileKey.var));

        const afterState = getters.emhttp().var?.regTy;
        if (beforeState !== afterState) {
            keyServerLogger.info('Registration state updated: %s -> %s', beforeState, afterState);
            return;
        }
        keyServerLogger.debug('Retry %d: var.ini regTy still %s', attempt + 1, afterState);
    }
    keyServerLogger.debug('var.ini regTy unchanged after %d retries (may be expected)', maxRetries);
};

export const setupRegistrationKeyWatch = () => {
    // IMPORTANT: /boot/config is on FAT32 flash drive which does NOT support inotify
    // Must use polling to detect file changes on FAT32 filesystems
    watch('/boot/config', {
        persistent: true,
        ignoreInitial: true,
        ignored: (path: string) => !path.endsWith('.key'),
        usePolling: CHOKIDAR_USEPOLLING === true,
    }).on('all', async () => {
        // Load updated key into store
        usePolling: true, // Required for FAT32 - inotify doesn't work
        interval: 5000, // Poll every 5 seconds (balance between responsiveness and CPU usage)
    }).on('all', async (event, path) => {
        keyServerLogger.info('Key file %s: %s', event, path);

        await store.dispatch(loadRegistrationKey());

        // Reload var.ini to get updated registration metadata from emhttpd
        await reloadVarIniWithRetry();
    });
};

40 api/src/types/jsonforms-i18n.d.ts vendored Normal file
@@ -0,0 +1,40 @@
import '@jsonforms/core/lib/models/jsonSchema4';
import '@jsonforms/core/lib/models/jsonSchema7';
import '@jsonforms/core/src/models/jsonSchema4';
import '@jsonforms/core/src/models/jsonSchema7';

declare module '@jsonforms/core/lib/models/jsonSchema4' {
    interface JsonSchema4 {
        i18n?: string;
    }
}

declare module '@jsonforms/core/lib/models/jsonSchema7' {
    interface JsonSchema7 {
        i18n?: string;
    }
}

declare module '@jsonforms/core/src/models/jsonSchema4' {
    interface JsonSchema4 {
        i18n?: string;
    }
}

declare module '@jsonforms/core/src/models/jsonSchema7' {
    interface JsonSchema7 {
        i18n?: string;
    }
}

declare module '@jsonforms/core/lib/models/jsonSchema4.js' {
    interface JsonSchema4 {
        i18n?: string;
    }
}

declare module '@jsonforms/core/lib/models/jsonSchema7.js' {
    interface JsonSchema7 {
        i18n?: string;
    }
}
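With the augmentations above in scope, JSON Forms schemas can carry a translation key without type errors. A minimal sketch (the key name is hypothetical, and this assumes @jsonforms/core re-exports the JsonSchema7 type):

import type { JsonSchema7 } from '@jsonforms/core';

const schema: JsonSchema7 = {
    type: 'string',
    i18n: 'settings.apiKey.label', // hypothetical key, resolved against the locale catalogs added above
};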
@@ -6,8 +6,7 @@ import { AuthZGuard } from 'nest-authz';
|
||||
import request from 'supertest';
|
||||
import { afterAll, beforeAll, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { loadDynamixConfig, store } from '@app/store/index.js';
|
||||
import { loadStateFiles } from '@app/store/modules/emhttp.js';
|
||||
import { AppModule } from '@app/unraid-api/app/app.module.js';
|
||||
import { AuthService } from '@app/unraid-api/auth/auth.service.js';
|
||||
@@ -111,8 +110,8 @@ describe('AppModule Integration Tests', () => {
|
||||
|
||||
beforeAll(async () => {
|
||||
// Initialize the dynamix config and state files before creating the module
|
||||
await store.dispatch(loadDynamixConfigFile());
|
||||
await store.dispatch(loadStateFiles());
|
||||
loadDynamixConfig();
|
||||
|
||||
// Debug: Log the CSRF token from the store
|
||||
const { getters } = await import('@app/store/index.js');
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { CacheModule } from '@nestjs/cache-manager';
|
||||
import { ConfigModule } from '@nestjs/config';
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import { describe, expect, it } from 'vitest';
|
||||
@@ -10,7 +11,11 @@ describe('Module Dependencies Integration', () => {
|
||||
let module;
|
||||
try {
|
||||
module = await Test.createTestingModule({
|
||||
imports: [CacheModule.register({ isGlobal: true }), RestModule],
|
||||
imports: [
|
||||
ConfigModule.forRoot({ ignoreEnvFile: true, isGlobal: true }),
|
||||
CacheModule.register({ isGlobal: true }),
|
||||
RestModule,
|
||||
],
|
||||
}).compile();
|
||||
|
||||
expect(module).toBeDefined();
|
||||
|
||||
@@ -36,6 +36,7 @@ const mockPluginManagementService = {
    addPlugin: vi.fn(),
    addBundledPlugin: vi.fn(),
    removePlugin: vi.fn(),
    removePluginConfigOnly: vi.fn(),
    removeBundledPlugin: vi.fn(),
    plugins: [] as string[],
};

@@ -147,6 +148,7 @@ describe('Plugin Commands', () => {
                '@unraid/plugin-example',
                '@unraid/plugin-test'
            );
            expect(mockPluginManagementService.removePluginConfigOnly).not.toHaveBeenCalled();
            expect(mockLogger.log).toHaveBeenCalledWith('Removed plugin @unraid/plugin-example');
            expect(mockLogger.log).toHaveBeenCalledWith('Removed plugin @unraid/plugin-test');
            expect(mockApiConfigPersistence.persist).toHaveBeenCalled();
@@ -178,9 +180,72 @@ describe('Plugin Commands', () => {
            expect(mockPluginManagementService.removePlugin).toHaveBeenCalledWith(
                '@unraid/plugin-example'
            );
            expect(mockPluginManagementService.removePluginConfigOnly).not.toHaveBeenCalled();
            expect(mockApiConfigPersistence.persist).toHaveBeenCalled();
            expect(mockRestartCommand.run).not.toHaveBeenCalled();
        });

        it('should bypass npm uninstall when bypass flag is provided', async () => {
            mockInquirerService.prompt.mockResolvedValue({
                plugins: ['@unraid/plugin-example'],
                restart: true,
                bypassNpm: true,
            });

            await command.run([], { restart: true, bypassNpm: true });

            expect(mockPluginManagementService.removePluginConfigOnly).toHaveBeenCalledWith(
                '@unraid/plugin-example'
            );
            expect(mockPluginManagementService.removePlugin).not.toHaveBeenCalled();
        });

        it('should preserve cli flags when prompt supplies plugins', async () => {
            mockInquirerService.prompt.mockResolvedValue({
                plugins: ['@unraid/plugin-example'],
            });

            await command.run([], { restart: false, bypassNpm: true });

            expect(mockPluginManagementService.removePluginConfigOnly).toHaveBeenCalledWith(
                '@unraid/plugin-example'
            );
            expect(mockPluginManagementService.removePlugin).not.toHaveBeenCalled();
            expect(mockRestartCommand.run).not.toHaveBeenCalled();
        });

        it('should honor prompt restart value when cli flag not provided', async () => {
            mockInquirerService.prompt.mockResolvedValue({
                plugins: ['@unraid/plugin-example'],
                restart: false,
            });

            await command.run([], {});

            expect(mockPluginManagementService.removePlugin).toHaveBeenCalledWith(
                '@unraid/plugin-example'
            );
            expect(mockRestartCommand.run).not.toHaveBeenCalled();
        });

        it('should respect passed params and skip inquirer', async () => {
            await command.run(['@unraid/plugin-example'], { restart: true, bypassNpm: false });

            expect(mockInquirerService.prompt).not.toHaveBeenCalled();
            expect(mockPluginManagementService.removePlugin).toHaveBeenCalledWith(
                '@unraid/plugin-example'
            );
        });

        it('should bypass npm when flag provided with passed params', async () => {
            await command.run(['@unraid/plugin-example'], { restart: true, bypassNpm: true });

            expect(mockInquirerService.prompt).not.toHaveBeenCalled();
            expect(mockPluginManagementService.removePluginConfigOnly).toHaveBeenCalledWith(
                '@unraid/plugin-example'
            );
            expect(mockPluginManagementService.removePlugin).not.toHaveBeenCalled();
        });
    });

    describe('ListPluginCommand', () => {

@@ -74,13 +74,15 @@ export class InstallPluginCommand extends CommandRunner {

interface RemovePluginCommandOptions {
    plugins?: string[];
    restart: boolean;
    restart?: boolean;
    bypassNpm?: boolean;
}

@SubCommand({
    name: 'remove',
    aliases: ['rm'],
    description: 'Remove plugin peer dependencies.',
    arguments: '[plugins...]',
})
export class RemovePluginCommand extends CommandRunner {
    constructor(
@@ -93,9 +95,83 @@ export class RemovePluginCommand extends CommandRunner {
        super();
    }

    async run(_passedParams: string[], options?: RemovePluginCommandOptions): Promise<void> {
    async run(passedParams: string[], options?: RemovePluginCommandOptions): Promise<void> {
        const cliBypass = options?.bypassNpm;
        const cliRestart = options?.restart;
        const mergedOptions: RemovePluginCommandOptions = {
            bypassNpm: cliBypass ?? false,
            restart: cliRestart ?? true,
            plugins: passedParams.length > 0 ? passedParams : options?.plugins,
        };

        let resolvedOptions = mergedOptions;
        if (!mergedOptions.plugins?.length) {
            const promptOptions = await this.promptForPlugins(mergedOptions);
            if (!promptOptions) {
                return;
            }
            resolvedOptions = {
                // precedence: cli > prompt > default (fallback)
                bypassNpm: cliBypass ?? promptOptions.bypassNpm ?? mergedOptions.bypassNpm,
                restart: cliRestart ?? promptOptions.restart ?? mergedOptions.restart,
                // precedence: prompt > default (fallback)
                plugins: promptOptions.plugins ?? mergedOptions.plugins,
            };
        }

        if (!resolvedOptions.plugins?.length) {
            this.logService.warn('No plugins selected for removal.');
            return;
        }

        if (resolvedOptions.bypassNpm) {
            await this.pluginManagementService.removePluginConfigOnly(...resolvedOptions.plugins);
        } else {
            await this.pluginManagementService.removePlugin(...resolvedOptions.plugins);
        }
        for (const plugin of resolvedOptions.plugins) {
            this.logService.log(`Removed plugin ${plugin}`);
        }
        await this.apiConfigPersistence.persist();

        if (resolvedOptions.restart) {
            await this.restartCommand.run();
        }
    }

    @Option({
        flags: '--no-restart',
        description: 'do NOT restart the service after deploy',
        defaultValue: true,
    })
    parseRestart(): boolean {
        return false;
    }

    @Option({
        flags: '-b, --bypass-npm',
        description: 'Bypass npm uninstall and only update the config',
        defaultValue: false,
        name: 'bypassNpm',
    })
    parseBypass(): boolean {
        return true;
    }

    @Option({
        flags: '--npm',
        description: 'Run npm uninstall for unbundled plugins (default behavior)',
        name: 'bypassNpm',
    })
    parseRunNpm(): boolean {
        return false;
    }

    private async promptForPlugins(
        initialOptions: RemovePluginCommandOptions
    ): Promise<RemovePluginCommandOptions | undefined> {
        try {
            options = await this.inquirerService.prompt(RemovePluginQuestionSet.name, options);
            return await this.inquirerService.prompt(RemovePluginQuestionSet.name, initialOptions);
        } catch (error) {
            if (error instanceof NoPluginsFoundError) {
                this.logService.error(error.message);
@@ -108,30 +184,6 @@ export class RemovePluginCommand extends CommandRunner {
                process.exit(1);
            }
        }

        if (!options.plugins || options.plugins.length === 0) {
            this.logService.warn('No plugins selected for removal.');
            return;
        }

        await this.pluginManagementService.removePlugin(...options.plugins);
        for (const plugin of options.plugins) {
            this.logService.log(`Removed plugin ${plugin}`);
        }
        await this.apiConfigPersistence.persist();

        if (options.restart) {
            await this.restartCommand.run();
        }
    }

    @Option({
        flags: '--no-restart',
        description: 'do NOT restart the service after deploy',
        defaultValue: true,
    })
    parseRestart(): boolean {
        return false;
    }
}

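The nullish-coalescing chain in the new run() is the whole trick: `undefined` means "flag not given", so CLI answers win over prompt answers, which win over defaults, while an explicit `false` from the CLI still sticks. A standalone sketch of the same pattern; the names are illustrative, not the command itself:

interface Opts {
    restart?: boolean;
    bypassNpm?: boolean;
}

// cli > prompt > default: `??` only falls through on undefined/null,
// so an explicit `false` from the CLI still beats the prompt's `true`.
function resolveOpts(cli: Opts, prompt: Opts, defaults: Required<Opts>): Required<Opts> {
    return {
        restart: cli.restart ?? prompt.restart ?? defaults.restart,
        bypassNpm: cli.bypassNpm ?? prompt.bypassNpm ?? defaults.bypassNpm,
    };
}

// resolveOpts({ restart: false }, { restart: true }, { restart: true, bypassNpm: false })
// => { restart: false, bypassNpm: false }
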
@@ -58,7 +58,8 @@ export class PM2Service {
            ...(needsPathUpdate && { PATH: finalPath }),
        };

        const runCommand = () => execa(PM2_PATH, [...args], execOptions satisfies Options);
        const pm2Args = args.some((arg) => arg === '--no-color') ? args : ['--no-color', ...args];
        const runCommand = () => execa(PM2_PATH, pm2Args, execOptions satisfies Options);
        if (raw) {
            return runCommand();
        }

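The `args.some(...)` guard keeps `--no-color` from being prepended twice when a caller already passed it. The same idempotent-injection pattern in isolation (a sketch, not the service itself):

// Prepend a flag only when the caller hasn't already supplied it.
function withFlag(args: string[], flag: string): string[] {
    return args.some((arg) => arg === flag) ? args : [flag, ...args];
}

withFlag(['logs', 'api'], '--no-color');        // ['--no-color', 'logs', 'api']
withFlag(['--no-color', 'logs'], '--no-color'); // unchanged
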
@@ -6,6 +6,7 @@ import type { ApiConfig } from '@unraid/shared/services/api-config.js';
import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';
import { csvStringToArray } from '@unraid/shared/util/data.js';

import { isConnectPluginInstalled } from '@app/connect-plugin-cleanup.js';
import { API_VERSION, PATHS_CONFIG_MODULES } from '@app/environment.js';

export { type ApiConfig };

@@ -29,6 +30,13 @@ export const loadApiConfig = async () => {
    const apiHandler = new ApiConfigPersistence(new ConfigService()).getFileHandler();

    const diskConfig: Partial<ApiConfig> = await apiHandler.loadConfig();
    // Hack: cleanup stale connect plugin entry if necessary
    if (!isConnectPluginInstalled()) {
        diskConfig.plugins = diskConfig.plugins?.filter(
            (plugin) => plugin !== 'unraid-api-plugin-connect'
        );
        await apiHandler.writeConfigFile(diskConfig as ApiConfig);
    }

    return {
        ...defaultConfig,

@@ -12,6 +12,24 @@ import {
    createSimpleLabeledControl,
} from '@app/unraid-api/graph/utils/form-utils.js';

const API_KEY_I18N = {
    name: 'jsonforms.apiKey.name',
    description: 'jsonforms.apiKey.description',
    roles: 'jsonforms.apiKey.roles',
    permissionPresets: 'jsonforms.apiKey.permissionPresets',
    customPermissions: {
        root: 'jsonforms.apiKey.customPermissions',
        resources: 'jsonforms.apiKey.customPermissions.resources',
        actions: 'jsonforms.apiKey.customPermissions.actions',
    },
    permissions: {
        header: 'jsonforms.apiKey.permissions.header',
        description: 'jsonforms.apiKey.permissions.description',
        subheader: 'jsonforms.apiKey.permissions.subheader',
        help: 'jsonforms.apiKey.permissions.help',
    },
} as const;

// Helper to get GraphQL enum names for JSON Schema
// GraphQL expects the enum names (keys) not the values
function getAuthActionEnumNames(): string[] {

@@ -82,6 +100,7 @@ export class ApiKeyFormService {
            properties: {
                name: {
                    type: 'string',
                    i18n: API_KEY_I18N.name,
                    title: 'API Key Name',
                    description: 'A descriptive name for this API key',
                    minLength: 1,
@@ -89,12 +108,14 @@ export class ApiKeyFormService {
                },
                description: {
                    type: 'string',
                    i18n: API_KEY_I18N.description,
                    title: 'Description',
                    description: 'Optional description of what this key is used for',
                    maxLength: 500,
                },
                roles: {
                    type: 'array',
                    i18n: API_KEY_I18N.roles,
                    title: 'Roles',
                    description: 'Select one or more roles to grant pre-defined permission sets',
                    items: {
@@ -105,6 +126,7 @@ export class ApiKeyFormService {
                },
                permissionPresets: {
                    type: 'string',
                    i18n: API_KEY_I18N.permissionPresets,
                    title: 'Add Permission Preset',
                    description: 'Quick add common permission sets',
                    enum: [
@@ -119,6 +141,7 @@ export class ApiKeyFormService {
                },
                customPermissions: {
                    type: 'array',
                    i18n: API_KEY_I18N.customPermissions.root,
                    title: 'Permissions',
                    description: 'Configure specific permissions',
                    items: {
@@ -126,6 +149,7 @@ export class ApiKeyFormService {
                        properties: {
                            resources: {
                                type: 'array',
                                i18n: API_KEY_I18N.customPermissions.resources,
                                title: 'Resources',
                                items: {
                                    type: 'string',
@@ -137,6 +161,7 @@ export class ApiKeyFormService {
                            },
                            actions: {
                                type: 'array',
                                i18n: API_KEY_I18N.customPermissions.actions,
                                title: 'Actions',
                                items: {
                                    type: 'string',
@@ -167,6 +192,7 @@ export class ApiKeyFormService {
                controlOptions: {
                    inputType: 'text',
                },
                i18nKey: API_KEY_I18N.name,
            }),
            createLabeledControl({
                scope: '#/properties/description',
@@ -177,6 +203,7 @@ export class ApiKeyFormService {
                    multi: true,
                    rows: 3,
                },
                i18nKey: API_KEY_I18N.description,
            }),
            // Permissions section header
            {
@@ -185,6 +212,7 @@ export class ApiKeyFormService {
                options: {
                    format: 'title',
                },
                i18n: API_KEY_I18N.permissions.header,
            } as LabelElement,
            {
                type: 'Label',
@@ -192,6 +220,7 @@ export class ApiKeyFormService {
                options: {
                    format: 'description',
                },
                i18n: API_KEY_I18N.permissions.description,
            } as LabelElement,
            // Roles selection
            createLabeledControl({
@@ -210,6 +239,7 @@ export class ApiKeyFormService {
                    ),
                    descriptions: this.getRoleDescriptions(),
                },
                i18nKey: API_KEY_I18N.roles,
            }),
            // Separator for permissions
            {
@@ -218,6 +248,7 @@ export class ApiKeyFormService {
                options: {
                    format: 'subtitle',
                },
                i18n: API_KEY_I18N.permissions.subheader,
            } as LabelElement,
            {
                type: 'Label',
@@ -225,6 +256,7 @@ export class ApiKeyFormService {
                options: {
                    format: 'description',
                },
                i18n: API_KEY_I18N.permissions.help,
            } as LabelElement,
            // Permission preset dropdown
            createLabeledControl({
@@ -242,6 +274,7 @@ export class ApiKeyFormService {
                        network_admin: 'Network Admin (Network & Services Control)',
                    },
                },
                i18nKey: API_KEY_I18N.permissionPresets,
            }),
            // Custom permissions array - following OIDC pattern exactly
            {
@@ -269,6 +302,7 @@ export class ApiKeyFormService {
                        {}
                    ),
                },
                i18nKey: API_KEY_I18N.customPermissions.resources,
            }),
            createSimpleLabeledControl({
                scope: '#/properties/actions',
@@ -278,6 +312,7 @@ export class ApiKeyFormService {
                    multiple: true,
                    labels: getAuthActionLabels(),
                },
                i18nKey: API_KEY_I18N.customPermissions.actions,
            }),
        ],
    },

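The `as const` object above centralizes every translation key for the API key form, so schema and layout reference the same literals. A short sketch of how a translation bundle might mirror those keys; the English values here are illustrative assumptions, not taken from this changeset:

// A frontend bundle would mirror the same keys:
const en: Record<string, string> = {
    'jsonforms.apiKey.name': 'API Key Name',
    'jsonforms.apiKey.description': 'Description',
    'jsonforms.apiKey.roles': 'Roles',
};

// `as const` narrows API_KEY_I18N.name to the literal 'jsonforms.apiKey.name',
// so a lookup like en[API_KEY_I18N.name] can be checked at compile time when
// the bundle is typed against the same literal keys.
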
@@ -3,6 +3,7 @@ import { Test, TestingModule } from '@nestjs/testing';
import * as fs from 'fs/promises';
import * as path from 'path';

import type { Mock } from 'vitest';
import { plainToInstance } from 'class-transformer';
import * as ini from 'ini';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
@@ -1182,4 +1183,58 @@ describe('CustomizationService - updateCfgFile', () => {
            writeError
        );
    });

    describe('getTheme', () => {
        const mockDynamix = getters.dynamix as unknown as Mock;
        const baseDisplay = {
            theme: 'white',
            banner: '',
            showBannerGradient: 'no',
            background: '123456',
            headerdescription: 'yes',
            headermetacolor: '789abc',
            header: 'abcdef',
        };

        const setDisplay = (overrides: Partial<typeof baseDisplay>) => {
            mockDynamix.mockReturnValue({
                display: {
                    ...baseDisplay,
                    ...overrides,
                },
            });
        };

        it('reports showBannerImage when banner is "image"', async () => {
            setDisplay({ banner: 'image' });

            const theme = await service.getTheme();

            expect(theme.showBannerImage).toBe(true);
        });

        it('reports showBannerImage when banner is "yes"', async () => {
            setDisplay({ banner: 'yes' });

            const theme = await service.getTheme();

            expect(theme.showBannerImage).toBe(true);
        });

        it('disables showBannerImage when banner is empty', async () => {
            setDisplay({ banner: '' });

            const theme = await service.getTheme();

            expect(theme.showBannerImage).toBe(false);
        });

        it('mirrors showBannerGradient flag from display settings', async () => {
            setDisplay({ banner: 'image', showBannerGradient: 'yes' });
            expect((await service.getTheme()).showBannerGradient).toBe(true);

            setDisplay({ banner: 'image', showBannerGradient: 'no' });
            expect((await service.getTheme()).showBannerGradient).toBe(false);
        });
    });
});

@@ -458,7 +458,7 @@ export class CustomizationService implements OnModuleInit {

        return {
            name,
            showBannerImage: banner === 'yes',
            showBannerImage: banner === 'image' || banner === 'yes',
            showBannerGradient: bannerGradient === 'yes',
            headerBackgroundColor: this.addHashtoHexField(bgColor),
            headerPrimaryTextColor: this.addHashtoHexField(textColor),

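The one-line fix widens the banner check so both the legacy 'yes' value and the newer 'image' value enable the banner, matching the tests above. The mapping in isolation:

// Both the legacy 'yes' and the newer 'image' display value enable the banner.
const showBannerImage = (banner: string): boolean =>
    banner === 'image' || banner === 'yes';

showBannerImage('image'); // true
showBannerImage('yes');   // true
showBannerImage('');      // false
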
@@ -0,0 +1,233 @@
import { Injectable, Logger } from '@nestjs/common';

import { constants as fsConstants } from 'node:fs';
import { access, readdir, readFile } from 'node:fs/promises';
import { join } from 'path';

@Injectable()
export class CpuTopologyService {
    private readonly logger = new Logger(CpuTopologyService.name);

    // -----------------------------------------------------------------
    // Read static CPU topology, per-package core thread pairs
    // -----------------------------------------------------------------
    async generateTopology(): Promise<number[][][]> {
        const packages: Record<number, number[][]> = {};
        let cpuDirs: string[];

        try {
            cpuDirs = await readdir('/sys/devices/system/cpu');
        } catch (err) {
            this.logger.warn('CPU topology unavailable, /sys/devices/system/cpu not accessible');
            return [];
        }

        for (const dir of cpuDirs) {
            if (!/^cpu\d+$/.test(dir)) continue;

            const basePath = join('/sys/devices/system/cpu', dir, 'topology');
            const pkgFile = join(basePath, 'physical_package_id');
            const siblingsFile = join(basePath, 'thread_siblings_list');

            try {
                const [pkgIdStr, siblingsStrRaw] = await Promise.all([
                    readFile(pkgFile, 'utf8'),
                    readFile(siblingsFile, 'utf8'),
                ]);

                const pkgId = parseInt(pkgIdStr.trim(), 10);

                // expand ranges
                const siblings = siblingsStrRaw
                    .trim()
                    .replace(/(\d+)-(\d+)/g, (_, start, end) =>
                        Array.from(
                            { length: parseInt(end) - parseInt(start) + 1 },
                            (_, i) => parseInt(start) + i
                        ).join(',')
                    )
                    .split(',')
                    .map((n) => parseInt(n, 10));

                if (!packages[pkgId]) packages[pkgId] = [];
                if (!packages[pkgId].some((arr) => arr.join(',') === siblings.join(','))) {
                    packages[pkgId].push(siblings);
                }
            } catch (err) {
                this.logger.warn(err, `Topology read error for ${dir}`);
            }
        }
        // Sort cores within each package, and packages by their lowest core index
        const result = Object.entries(packages)
            .sort((a, b) => a[1][0][0] - b[1][0][0]) // sort packages by first CPU ID
            .map(
                ([pkgId, cores]) => cores.sort((a, b) => a[0] - b[0]) // sort cores within package
            );

        return result;
    }

    // -----------------------------------------------------------------
    // Dynamic telemetry (power + temperature)
    // -----------------------------------------------------------------
    private async getPackageTemps(): Promise<number[]> {
        const temps: number[] = [];
        try {
            const hwmons = await readdir('/sys/class/hwmon');
            for (const hwmon of hwmons) {
                const path = join('/sys/class/hwmon', hwmon);
                try {
                    const label = (await readFile(join(path, 'name'), 'utf8')).trim();
                    if (/coretemp|k10temp|zenpower/i.test(label)) {
                        const files = await readdir(path);
                        for (const f of files) {
                            if (f.startsWith('temp') && f.endsWith('_label')) {
                                const lbl = (await readFile(join(path, f), 'utf8')).trim().toLowerCase();
                                if (
                                    lbl.includes('package id') ||
                                    lbl.includes('tctl') ||
                                    lbl.includes('tdie')
                                ) {
                                    const inputFile = join(path, f.replace('_label', '_input'));
                                    try {
                                        const raw = await readFile(inputFile, 'utf8');
                                        const parsed = parseInt(raw.trim(), 10);
                                        if (Number.isFinite(parsed)) {
                                            temps.push(parsed / 1000);
                                        } else {
                                            this.logger.warn(`Invalid temperature value: ${raw.trim()}`);
                                        }
                                    } catch (err) {
                                        this.logger.warn('Failed to read file', err);
                                    }
                                }
                            }
                        }
                    }
                } catch (err) {
                    this.logger.warn('Failed to read file', err);
                }
            }
        } catch (err) {
            this.logger.warn('Failed to read file', err);
        }
        return temps;
    }

    private async getPackagePower(): Promise<Record<number, Record<string, number>>> {
        const basePath = '/sys/class/powercap';
        const prefixes = ['intel-rapl', 'intel-rapl-mmio', 'amd-rapl'];
        const raplPaths: string[] = [];

        try {
            const entries = await readdir(basePath, { withFileTypes: true });
            for (const entry of entries) {
                if (entry.isSymbolicLink() && prefixes.some((p) => entry.name.startsWith(p))) {
                    if (/:\d+:\d+/.test(entry.name)) continue;
                    raplPaths.push(join(basePath, entry.name));
                }
            }
        } catch {
            return {};
        }

        if (!raplPaths.length) return {};

        const readEnergy = async (p: string): Promise<number | null> => {
            try {
                await access(join(p, 'energy_uj'), fsConstants.R_OK);
                const raw = await readFile(join(p, 'energy_uj'), 'utf8');
                const parsed = parseInt(raw.trim(), 10);
                return Number.isFinite(parsed) ? parsed : null;
            } catch {
                return null;
            }
        };

        const prevE = new Map<string, number>();
        const prevT = new Map<string, bigint>();

        for (const p of raplPaths) {
            const val = await readEnergy(p);
            if (val !== null) {
                prevE.set(p, val);
                prevT.set(p, process.hrtime.bigint());
            }
        }

        await new Promise((res) => setTimeout(res, 100));

        const results: Record<number, Record<string, number>> = {};

        for (const p of raplPaths) {
            const now = await readEnergy(p);
            if (now === null) continue;

            const prevVal = prevE.get(p);
            const prevTime = prevT.get(p);
            if (prevVal === undefined || prevTime === undefined) continue;

            const diffE = now - prevVal;
            const diffT = Number(process.hrtime.bigint() - prevTime);

            if (!Number.isFinite(diffE) || !Number.isFinite(diffT)) {
                this.logger.warn(`Non-finite energy/time diff for ${p}`);
                continue;
            }

            if (diffT <= 0 || diffE < 0) continue;

            const watts = (diffE * 1e-6) / (diffT * 1e-9);
            const powerW = Math.round(watts * 100) / 100;

            if (!Number.isFinite(powerW)) {
                this.logger.warn(`Non-finite power value for ${p}: ${watts}`);
                continue;
            }

            const nameFile = join(p, 'name');
            let label = 'package';
            try {
                label = (await readFile(nameFile, 'utf8')).trim();
            } catch (err) {
                this.logger.warn('Failed to read file', err);
            }

            const pkgMatch = label.match(/package-(\d+)/i);
            const pkgId = pkgMatch ? Number(pkgMatch[1]) : 0;

            if (!results[pkgId]) results[pkgId] = {};
            results[pkgId][label] = powerW;
        }

        for (const domains of Object.values(results)) {
            const total = Object.values(domains).reduce((a, b) => a + b, 0);
            domains['total'] = Math.round(total * 100) / 100;
        }

        return results;
    }

    async generateTelemetry(): Promise<{ id: number; power: number; temp: number }[]> {
        const temps = await this.getPackageTemps();
        const powerData = await this.getPackagePower();

        const maxPkg = Math.max(temps.length - 1, ...Object.keys(powerData).map(Number), 0);

        const result: {
            id: number;
            power: number;
            temp: number;
        }[] = [];

        for (let pkgId = 0; pkgId <= maxPkg; pkgId++) {
            const entry = powerData[pkgId] ?? {};
            result.push({
                id: pkgId,
                power: entry.total ?? -1,
                temp: temps[pkgId] ?? -1,
            });
        }

        return result;
    }
}

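Two pieces of arithmetic in this new service are worth seeing in isolation. First, thread_siblings_list arrives as e.g. "0-3,8-11" and the service expands the ranges with a regex replace before splitting; second, RAPL energy counters are microjoules and hrtime deltas are nanoseconds, so the unit conversion cancels to watts. A standalone sketch of both (same logic, illustrative names):

// 1) Range expansion, equivalent to the replace/split chain above.
function expandCpuList(raw: string): number[] {
    return raw
        .trim()
        .replace(/(\d+)-(\d+)/g, (_, start, end) =>
            Array.from(
                { length: parseInt(end, 10) - parseInt(start, 10) + 1 },
                (_, i) => parseInt(start, 10) + i
            ).join(',')
        )
        .split(',')
        .map((n) => parseInt(n, 10));
}
// expandCpuList('0-3,8-11') => [0, 1, 2, 3, 8, 9, 10, 11]

// 2) RAPL power: watts = (dE µJ * 1e-6 J/µJ) / (dt ns * 1e-9 s/ns).
const watts = (diffMicroJoules: number, diffNanos: number): number =>
    (diffMicroJoules * 1e-6) / (diffNanos * 1e-9);
// watts(100_000, 100_000_000) => 1 W over the 100 ms sampling window
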
@@ -39,6 +39,18 @@ export class CpuLoad {
    percentSteal!: number;
}

@ObjectType({ implements: () => Node })
export class CpuPackages extends Node {
    @Field(() => Float, { description: 'Total CPU package power draw (W)' })
    totalPower!: number;

    @Field(() => [Float], { description: 'Power draw per package (W)' })
    power!: number[];

    @Field(() => [Float], { description: 'Temperature per package (°C)' })
    temp!: number[];
}

@ObjectType({ implements: () => Node })
export class CpuUtilization extends Node {
    @Field(() => Float, { description: 'Total CPU load in percent' })

@@ -100,4 +112,12 @@ export class InfoCpu extends Node {

    @Field(() => [String], { nullable: true, description: 'CPU feature flags' })
    flags?: string[];

    @Field(() => [[[Int]]], {
        description: 'Per-package array of core/thread pairs, e.g. [[[0,1],[2,3]], [[4,5],[6,7]]]',
    })
    topology!: number[][][];

    @Field(() => CpuPackages)
    packages!: CpuPackages;
}

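The `[[[Int]]]` field encodes packages → cores → hyperthread siblings. A value for a dual-package machine with two SMT-2 cores per package would look like:

// packages -> cores -> sibling thread IDs
const topology: number[][][] = [
    [[0, 1], [2, 3]], // package 0: one core with threads 0/1, one with threads 2/3
    [[4, 5], [6, 7]], // package 1
];
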
10 api/src/unraid-api/graph/resolvers/info/cpu/cpu.module.ts Normal file
@@ -0,0 +1,10 @@
import { Module } from '@nestjs/common';

import { CpuTopologyService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu-topology.service.js';
import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service.js';

@Module({
    providers: [CpuService, CpuTopologyService],
    exports: [CpuService, CpuTopologyService],
})
export class CpuModule {}

@@ -1,5 +1,6 @@
import { beforeEach, describe, expect, it, vi } from 'vitest';

import { CpuTopologyService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu-topology.service.js';
import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service.js';

vi.mock('systeminformation', () => ({

@@ -88,9 +89,27 @@ vi.mock('systeminformation', () => ({

describe('CpuService', () => {
    let service: CpuService;
    let cpuTopologyService: CpuTopologyService;

    beforeEach(() => {
        service = new CpuService();
        cpuTopologyService = {
            generateTopology: vi.fn().mockResolvedValue([
                [
                    [0, 1],
                    [2, 3],
                ],
                [
                    [4, 5],
                    [6, 7],
                ],
            ]),
            generateTelemetry: vi.fn().mockResolvedValue([
                { power: 32.5, temp: 45.0 },
                { power: 33.0, temp: 46.0 },
            ]),
        } as unknown as CpuTopologyService;

        service = new CpuService(cpuTopologyService);
    });

    describe('generateCpu', () => {

@@ -121,6 +140,22 @@ describe('CpuService', () => {
                l3: 12582912,
            },
            flags: ['fpu', 'vme', 'de', 'pse', 'tsc', 'msr', 'pae', 'mce', 'cx8'],
            packages: {
                id: 'info/cpu/packages',
                totalPower: 65.5,
                power: [32.5, 33.0],
                temp: [45.0, 46.0],
            },
            topology: [
                [
                    [0, 1],
                    [2, 3],
                ],
                [
                    [4, 5],
                    [6, 7],
                ],
            ],
        });
    });

@@ -2,25 +2,56 @@ import { Injectable } from '@nestjs/common';

import { cpu, cpuFlags, currentLoad } from 'systeminformation';

import { CpuUtilization, InfoCpu } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.model.js';
import { CpuTopologyService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu-topology.service.js';
import {
    CpuPackages,
    CpuUtilization,
    InfoCpu,
} from '@app/unraid-api/graph/resolvers/info/cpu/cpu.model.js';

@Injectable()
export class CpuService {
    constructor(private readonly cpuTopologyService: CpuTopologyService) {}

    async generateCpu(): Promise<InfoCpu> {
        const { cores, physicalCores, speedMin, speedMax, stepping, ...rest } = await cpu();
        const { cores, physicalCores, speedMin, speedMax, stepping, processors, ...rest } = await cpu();
        const flags = await cpuFlags()
            .then((flags) => flags.split(' '))
            .then((f) => f.split(' '))
            .catch(() => []);

        // Gather telemetry
        const packageList = await this.cpuTopologyService.generateTelemetry();
        const topology = await this.cpuTopologyService.generateTopology();

        // Compute total power (2 decimals)
        const totalPower = Number(
            packageList
                .map((pkg) => pkg.power)
                .filter((power) => power >= 0)
                .reduce((sum, power) => sum + power, 0)
                .toFixed(2)
        );

        // Build CpuPackages object
        const packages: CpuPackages = {
            id: 'info/cpu/packages',
            totalPower,
            power: packageList.map((pkg) => pkg.power ?? -1),
            temp: packageList.map((pkg) => pkg.temp ?? -1),
        };

        return {
            id: 'info/cpu',
            ...rest,
            cores: physicalCores,
            threads: cores,
            processors,
            flags,
            stepping: Number(stepping),
            speedmin: speedMin || -1,
            speedmax: speedMax || -1,
            packages,
            topology,
        };
    }

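The total-power computation filters out the -1 sentinel for packages that reported no reading, then rounds via the toFixed/Number round-trip. The same idiom in isolation:

// Sum only packages that actually reported power (sentinel is -1),
// then round to 2 decimals via the toFixed/Number round-trip used above.
const sumPower = (powers: number[]): number =>
    Number(powers.filter((p) => p >= 0).reduce((sum, p) => sum + p, 0).toFixed(2));

sumPower([32.5, 33.0]); // 65.5
sumPower([32.5, -1]);   // 32.5 — missing readings don't poison the sum
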
@@ -1,6 +1,7 @@
import { Module } from '@nestjs/common';
import { ConfigModule } from '@nestjs/config';

import { CpuModule } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.module.js';
import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service.js';
import { DevicesResolver } from '@app/unraid-api/graph/resolvers/info/devices/devices.resolver.js';
import { DevicesService } from '@app/unraid-api/graph/resolvers/info/devices/devices.service.js';
@@ -14,7 +15,7 @@ import { VersionsService } from '@app/unraid-api/graph/resolvers/info/versions/v
import { ServicesModule } from '@app/unraid-api/graph/services/services.module.js';

@Module({
    imports: [ConfigModule, ServicesModule],
    imports: [ConfigModule, ServicesModule, CpuModule],
    providers: [
        // Main resolver
        InfoResolver,
@@ -25,7 +26,6 @@ import { ServicesModule } from '@app/unraid-api/graph/services/services.module.j
        CoreVersionsResolver,

        // Services
        CpuService,
        MemoryService,
        DevicesService,
        OsService,

@@ -6,6 +6,7 @@ import { Test } from '@nestjs/testing';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { CpuTopologyService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu-topology.service.js';
import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service.js';
import { DevicesResolver } from '@app/unraid-api/graph/resolvers/info/devices/devices.resolver.js';
import { DevicesService } from '@app/unraid-api/graph/resolvers/info/devices/devices.service.js';
@@ -28,6 +29,7 @@ describe('InfoResolver Integration Tests', () => {
                InfoResolver,
                DevicesResolver,
                CpuService,
                CpuTopologyService,
                MemoryService,
                DevicesService,
                OsService,

@@ -1,13 +1,13 @@
import { Module } from '@nestjs/common';

import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service.js';
import { CpuModule } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.module.js';
import { MemoryService } from '@app/unraid-api/graph/resolvers/info/memory/memory.service.js';
import { MetricsResolver } from '@app/unraid-api/graph/resolvers/metrics/metrics.resolver.js';
import { ServicesModule } from '@app/unraid-api/graph/services/services.module.js';

@Module({
    imports: [ServicesModule],
    providers: [MetricsResolver, CpuService, MemoryService],
    imports: [ServicesModule, CpuModule],
    providers: [MetricsResolver, MemoryService],
    exports: [MetricsResolver],
})
export class MetricsModule {}

@@ -5,6 +5,7 @@ import { Test } from '@nestjs/testing';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { pubsub, PUBSUB_CHANNEL } from '@app/core/pubsub.js';
import { CpuTopologyService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu-topology.service.js';
import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service.js';
import { MemoryService } from '@app/unraid-api/graph/resolvers/info/memory/memory.service.js';
import { MetricsResolver } from '@app/unraid-api/graph/resolvers/metrics/metrics.resolver.js';
@@ -22,6 +23,7 @@ describe('MetricsResolver Integration Tests', () => {
            providers: [
                MetricsResolver,
                CpuService,
                CpuTopologyService,
                MemoryService,
                SubscriptionTrackerService,
                SubscriptionHelperService,

@@ -3,6 +3,7 @@ import { Test } from '@nestjs/testing';

import { beforeEach, describe, expect, it, vi } from 'vitest';

import { CpuTopologyService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu-topology.service.js';
import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service.js';
import { MemoryService } from '@app/unraid-api/graph/resolvers/info/memory/memory.service.js';
import { MetricsResolver } from '@app/unraid-api/graph/resolvers/metrics/metrics.resolver.js';
@@ -18,6 +19,7 @@ describe('MetricsResolver', () => {
        const module: TestingModule = await Test.createTestingModule({
            providers: [
                MetricsResolver,
                CpuTopologyService,
                {
                    provide: CpuService,
                    useValue: {
@@ -161,8 +163,14 @@ describe('MetricsResolver', () => {
            registerTopic: vi.fn(),
        };

        const cpuTopologyServiceMock = {
            generateTopology: vi.fn(),
            generateTelemetry: vi.fn().mockResolvedValue([{ id: 0, power: 42.5, temp: 68.3 }]),
        } satisfies Pick<CpuTopologyService, 'generateTopology' | 'generateTelemetry'>;

        const testModule = new MetricsResolver(
            cpuService,
            cpuTopologyServiceMock as unknown as CpuTopologyService,
            memoryService,
            subscriptionTracker as any,
            {} as any
@@ -170,7 +178,7 @@ describe('MetricsResolver', () => {

        testModule.onModuleInit();

        expect(subscriptionTracker.registerTopic).toHaveBeenCalledTimes(2);
        expect(subscriptionTracker.registerTopic).toHaveBeenCalledTimes(3);
        expect(subscriptionTracker.registerTopic).toHaveBeenCalledWith(
            'CPU_UTILIZATION',
            expect.any(Function),

@@ -1,11 +1,12 @@
import { OnModuleInit } from '@nestjs/common';
import { Logger, OnModuleInit } from '@nestjs/common';
import { Query, ResolveField, Resolver, Subscription } from '@nestjs/graphql';

import { AuthAction, Resource } from '@unraid/shared/graphql.model.js';
import { UsePermissions } from '@unraid/shared/use-permissions.directive.js';

import { pubsub, PUBSUB_CHANNEL } from '@app/core/pubsub.js';
import { CpuUtilization } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.model.js';
import { CpuTopologyService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu-topology.service.js';
import { CpuPackages, CpuUtilization } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.model.js';
import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service.js';
import { MemoryUtilization } from '@app/unraid-api/graph/resolvers/info/memory/memory.model.js';
import { MemoryService } from '@app/unraid-api/graph/resolvers/info/memory/memory.service.js';
@@ -15,8 +16,10 @@ import { SubscriptionTrackerService } from '@app/unraid-api/graph/services/subsc

@Resolver(() => Metrics)
export class MetricsResolver implements OnModuleInit {
    private readonly logger = new Logger(MetricsResolver.name);
    constructor(
        private readonly cpuService: CpuService,
        private readonly cpuTopologyService: CpuTopologyService,
        private readonly memoryService: MemoryService,
        private readonly subscriptionTracker: SubscriptionTrackerService,
        private readonly subscriptionHelper: SubscriptionHelperService
@@ -33,6 +36,38 @@ export class MetricsResolver implements OnModuleInit {
            1000
        );

        this.subscriptionTracker.registerTopic(
            PUBSUB_CHANNEL.CPU_TELEMETRY,
            async () => {
                const packageList = (await this.cpuTopologyService.generateTelemetry()) ?? [];

                // Compute total power with 2 decimals
                const totalPower = Number(
                    packageList
                        .map((pkg) => pkg.power)
                        .filter((power) => power >= 0)
                        .reduce((sum, power) => sum + power, 0)
                        .toFixed(2)
                );

                const packages: CpuPackages = {
                    id: 'metrics/cpu/packages',
                    totalPower,
                    power: packageList.map((pkg) => pkg.power ?? -1),
                    temp: packageList.map((pkg) => pkg.temp ?? -1),
                };
                this.logger.debug(`CPU_TELEMETRY payload: ${JSON.stringify(packages)}`);

                // Publish the payload
                pubsub.publish(PUBSUB_CHANNEL.CPU_TELEMETRY, {
                    systemMetricsCpuTelemetry: packages,
                });

                this.logger.debug(`CPU_TELEMETRY payload2: ${JSON.stringify(packages)}`);
            },
            5000
        );

        // Register memory polling with 2 second interval
        this.subscriptionTracker.registerTopic(
            PUBSUB_CHANNEL.MEMORY_UTILIZATION,
@@ -77,6 +112,18 @@ export class MetricsResolver implements OnModuleInit {
        return this.subscriptionHelper.createTrackedSubscription(PUBSUB_CHANNEL.CPU_UTILIZATION);
    }

    @Subscription(() => CpuPackages, {
        name: 'systemMetricsCpuTelemetry',
        resolve: (value) => value.systemMetricsCpuTelemetry,
    })
    @UsePermissions({
        action: AuthAction.READ_ANY,
        resource: Resource.INFO,
    })
    public async systemMetricsCpuTelemetrySubscription() {
        return this.subscriptionHelper.createTrackedSubscription(PUBSUB_CHANNEL.CPU_TELEMETRY);
    }

    @Subscription(() => MemoryUtilization, {
        name: 'systemMetricsMemory',
        resolve: (value) => value.systemMetricsMemory,

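For context, the new subscription is consumed like any other GraphQL subscription; the field selection below follows the CpuPackages model added above, while the client wiring itself is an illustrative assumption (any GraphQL client with subscription support works):

// Field names follow the CpuPackages model; id comes from the Node interface.
const CPU_TELEMETRY_SUBSCRIPTION = /* GraphQL */ `
    subscription {
        systemMetricsCpuTelemetry {
            id
            totalPower
            power
            temp
        }
    }
`;
// Expect a payload roughly every 5 seconds (the registerTopic interval),
// e.g. { totalPower: 65.5, power: [32.5, 33], temp: [45, 46] }.
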
@@ -14,6 +14,16 @@ import {
} from '@app/unraid-api/graph/resolvers/notifications/notifications.model.js';
import { NotificationsService } from '@app/unraid-api/graph/resolvers/notifications/notifications.service.js';

// Mock fs/promises for unit tests
vi.mock('fs/promises', async () => {
    const actual = await vi.importActual<typeof import('fs/promises')>('fs/promises');
    const mockReadFile = vi.fn();
    return {
        ...actual,
        readFile: mockReadFile,
    };
});

// Mock getters.dynamix, Logger, and pubsub
vi.mock('@app/store/index.js', () => {
    // Create test directory path inside factory function

@@ -61,24 +71,24 @@ const testNotificationsDir = join(tmpdir(), 'unraid-api-test-notifications');

describe('NotificationsService - loadNotificationFile (minimal mocks)', () => {
    let service: NotificationsService;
    let mockReadFile: any;

    beforeEach(() => {
    beforeEach(async () => {
        const fsPromises = await import('fs/promises');
        mockReadFile = fsPromises.readFile as any;
        vi.mocked(mockReadFile).mockClear();
        service = new NotificationsService();
    });

    it('should load and validate a valid notification file', async () => {
        const mockNotificationIni: NotificationIni = {
            timestamp: '1609459200',
            event: 'Test Event',
            subject: 'Test Subject',
            description: 'Test Description',
            importance: 'alert',
            link: 'http://example.com',
        };
        const mockFileContent = `timestamp=1609459200
event=Test Event
subject=Test Subject
description=Test Description
importance=alert
link=http://example.com`;

        vi.spyOn(await import('@app/core/utils/misc/parse-config.js'), 'parseConfig').mockReturnValue(
            mockNotificationIni
        );
        vi.mocked(mockReadFile).mockResolvedValue(mockFileContent);

        const result = await (service as any).loadNotificationFile(
            '/test/path/test.notify',
@@ -99,17 +109,12 @@ describe('NotificationsService - loadNotificationFile (minimal mocks)', () => {
    });

    it('should return masked warning notification on validation error (missing required fields)', async () => {
        const invalidNotificationIni: Omit<NotificationIni, 'event'> = {
            timestamp: '1609459200',
            // event: 'Missing Event', // missing required field
            subject: 'Test Subject',
            description: 'Test Description',
            importance: 'alert',
        };
        const mockFileContent = `timestamp=1609459200
subject=Test Subject
description=Test Description
importance=alert`;

        vi.spyOn(await import('@app/core/utils/misc/parse-config.js'), 'parseConfig').mockReturnValue(
            invalidNotificationIni
        );
        vi.mocked(mockReadFile).mockResolvedValue(mockFileContent);

        const result = await (service as any).loadNotificationFile(
            '/test/path/invalid.notify',
@@ -121,17 +126,13 @@ describe('NotificationsService - loadNotificationFile (minimal mocks)', () => {
    });

    it('should handle invalid enum values', async () => {
        const invalidNotificationIni: NotificationIni = {
            timestamp: '1609459200',
            event: 'Test Event',
            subject: 'Test Subject',
            description: 'Test Description',
            importance: 'not-a-valid-enum' as any,
        };
        const mockFileContent = `timestamp=1609459200
event=Test Event
subject=Test Subject
description=Test Description
importance=not-a-valid-enum`;

        vi.spyOn(await import('@app/core/utils/misc/parse-config.js'), 'parseConfig').mockReturnValue(
            invalidNotificationIni
        );
        vi.mocked(mockReadFile).mockResolvedValue(mockFileContent);

        const result = await (service as any).loadNotificationFile(
            '/test/path/invalid-enum.notify',
@@ -145,16 +146,12 @@ describe('NotificationsService - loadNotificationFile (minimal mocks)', () => {
    });

    it('should handle missing description field (should return masked warning notification)', async () => {
        const mockNotificationIni: Omit<NotificationIni, 'description'> = {
            timestamp: '1609459200',
            event: 'Test Event',
            subject: 'Test Subject',
            importance: 'normal',
        };
        const mockFileContent = `timestamp=1609459200
event=Test Event
subject=Test Subject
importance=normal`;

        vi.spyOn(await import('@app/core/utils/misc/parse-config.js'), 'parseConfig').mockReturnValue(
            mockNotificationIni
        );
        vi.mocked(mockReadFile).mockResolvedValue(mockFileContent);

        const result = await (service as any).loadNotificationFile(
            '/test/path/test.notify',
@@ -166,19 +163,15 @@ describe('NotificationsService - loadNotificationFile (minimal mocks)', () => {
    });

    it('should preserve passthrough data from notification file (only known fields)', async () => {
        const mockNotificationIni: NotificationIni & { customField: string } = {
            timestamp: '1609459200',
            event: 'Test Event',
            subject: 'Test Subject',
            description: 'Test Description',
            importance: 'normal',
            link: 'http://example.com',
            customField: 'custom value',
        };
        const mockFileContent = `timestamp=1609459200
event=Test Event
subject=Test Subject
description=Test Description
importance=normal
link=http://example.com
customField=custom value`;

        vi.spyOn(await import('@app/core/utils/misc/parse-config.js'), 'parseConfig').mockReturnValue(
            mockNotificationIni
        );
        vi.mocked(mockReadFile).mockResolvedValue(mockFileContent);

        const result = await (service as any).loadNotificationFile(
            '/test/path/test.notify',
@@ -201,17 +194,12 @@ describe('NotificationsService - loadNotificationFile (minimal mocks)', () => {
    });

    it('should handle missing timestamp field gracefully', async () => {
        const mockNotificationIni: Omit<NotificationIni, 'timestamp'> = {
            // timestamp is missing
            event: 'Test Event',
            subject: 'Test Subject',
            description: 'Test Description',
            importance: 'alert',
        };
        const mockFileContent = `event=Test Event
subject=Test Subject
description=Test Description
importance=alert`;

        vi.spyOn(await import('@app/core/utils/misc/parse-config.js'), 'parseConfig').mockReturnValue(
            mockNotificationIni
        );
        vi.mocked(mockReadFile).mockResolvedValue(mockFileContent);

        const result = await (service as any).loadNotificationFile(
            '/test/path/missing-timestamp.notify',
@@ -225,17 +213,13 @@ describe('NotificationsService - loadNotificationFile (minimal mocks)', () => {
    });

    it('should handle malformed timestamp field gracefully', async () => {
        const mockNotificationIni: NotificationIni = {
            timestamp: 'not-a-timestamp',
            event: 'Test Event',
            subject: 'Test Subject',
            description: 'Test Description',
            importance: 'alert',
        };
        const mockFileContent = `timestamp=not-a-timestamp
event=Test Event
subject=Test Subject
description=Test Description
importance=alert`;

        vi.spyOn(await import('@app/core/utils/misc/parse-config.js'), 'parseConfig').mockReturnValue(
            mockNotificationIni
        );
        vi.mocked(mockReadFile).mockResolvedValue(mockFileContent);

        const result = await (service as any).loadNotificationFile(
            '/test/path/malformed-timestamp.notify',

@@ -1,5 +1,5 @@
import { Injectable, Logger } from '@nestjs/common';
import { readdir, rename, stat, unlink, writeFile } from 'fs/promises';
import { readdir, readFile, rename, stat, unlink, writeFile } from 'fs/promises';
import { basename, join } from 'path';

import type { Stats } from 'fs';
@@ -7,6 +7,7 @@ import { FSWatcher, watch } from 'chokidar';
import { ValidationError } from 'class-validator';
import { execa } from 'execa';
import { emptyDir } from 'fs-extra';
import { decode } from 'html-entities';
import { encode as encodeIni } from 'ini';
import { v7 as uuidv7 } from 'uuid';

@@ -648,8 +649,11 @@ export class NotificationsService {
     * @throws File system errors (file not found, permission issues) or unexpected validation errors.
     */
    private async loadNotificationFile(path: string, type: NotificationType): Promise<Notification> {
        const rawContent = await readFile(path, 'utf-8');
        const decodedContent = decode(rawContent);

        const notificationFile = parseConfig<NotificationIni>({
            filePath: path,
            file: decodedContent,
            type: 'ini',
        });

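Decoding HTML entities before the INI parse is the point of the change: notify files can contain HTML-encoded characters, and decoding first keeps the parsed values human-readable. A minimal sketch with the same two libraries; the sample input is illustrative:

import { decode } from 'html-entities';
import { parse as parseIni } from 'ini';

// '&amp;' would otherwise survive into the parsed subject verbatim.
const raw = 'subject=Disk 1 &amp; Disk 2\nimportance=alert';
const parsed = parseIni(decode(raw));
// parsed.subject === 'Disk 1 & Disk 2'
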
@@ -11,6 +11,10 @@ import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/
import { createLabeledControl } from '@app/unraid-api/graph/utils/form-utils.js';
import { SettingSlice } from '@app/unraid-api/types/json-forms.js';

const API_SETTINGS_I18N = {
    sandbox: 'jsonforms.apiSettings.sandbox',
} as const;

@Injectable()
export class ApiSettings {
    private readonly logger = new Logger(ApiSettings.name);
@@ -83,6 +87,7 @@ export class ApiSettings {
            properties: {
                sandbox: {
                    type: 'boolean',
                    i18n: API_SETTINGS_I18N.sandbox,
                    title: 'Enable Developer Sandbox',
                    default: false,
                },
@@ -95,6 +100,7 @@ export class ApiSettings {
                controlOptions: {
                    toggle: true,
                },
                i18nKey: API_SETTINGS_I18N.sandbox,
            }),
        ],
    };

@@ -1,4 +1,5 @@
import { forwardRef, Module } from '@nestjs/common';
import { ConfigModule } from '@nestjs/config';

import { UserSettingsModule } from '@unraid/shared/services/user-settings.js';

@@ -7,7 +8,7 @@ import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';

@Module({
    imports: [UserSettingsModule, forwardRef(() => OidcClientModule)],
    imports: [ConfigModule, UserSettingsModule, forwardRef(() => OidcClientModule)],
    providers: [OidcConfigPersistence, OidcValidationService],
    exports: [OidcConfigPersistence, OidcValidationService],
})

@@ -21,6 +21,59 @@ import {
|
||||
} from '@app/unraid-api/graph/utils/form-utils.js';
|
||||
import { SettingSlice } from '@app/unraid-api/types/json-forms.js';
|
||||
|
||||
const OIDC_I18N = {
|
||||
provider: {
|
||||
id: 'jsonforms.oidc.provider.id',
|
||||
name: 'jsonforms.oidc.provider.name',
|
||||
clientId: 'jsonforms.oidc.provider.clientId',
|
||||
clientSecret: 'jsonforms.oidc.provider.clientSecret',
|
||||
issuer: 'jsonforms.oidc.provider.issuer',
|
||||
scopes: 'jsonforms.oidc.provider.scopes',
|
||||
discoveryToggle: 'jsonforms.oidc.provider.discoveryToggle',
|
||||
authorizationEndpoint: 'jsonforms.oidc.provider.authorizationEndpoint',
|
||||
tokenEndpoint: 'jsonforms.oidc.provider.tokenEndpoint',
|
||||
userInfoEndpoint: 'jsonforms.oidc.provider.userInfoEndpoint',
|
||||
jwksUri: 'jsonforms.oidc.provider.jwksUri',
|
||||
unraidNet: 'jsonforms.oidc.provider.unraidNet',
|
||||
},
|
||||
restrictions: {
|
||||
sectionTitle: 'jsonforms.oidc.restrictions.title',
|
||||
sectionHelp: 'jsonforms.oidc.restrictions.help',
|
||||
allowedDomains: 'jsonforms.oidc.restrictions.allowedDomains',
|
||||
allowedEmails: 'jsonforms.oidc.restrictions.allowedEmails',
|
||||
allowedUserIds: 'jsonforms.oidc.restrictions.allowedUserIds',
|
||||
workspaceDomain: 'jsonforms.oidc.restrictions.workspaceDomain',
|
||||
},
|
||||
rules: {
|
||||
mode: 'jsonforms.oidc.rules.mode',
|
||||
claim: 'jsonforms.oidc.rules.claim',
|
||||
operator: 'jsonforms.oidc.rules.operator',
|
||||
value: 'jsonforms.oidc.rules.value',
|
||||
collection: 'jsonforms.oidc.rules.collection',
|
||||
sectionTitle: 'jsonforms.oidc.rules.title',
|
||||
sectionDescription: 'jsonforms.oidc.rules.description',
|
||||
},
|
||||
buttons: {
|
||||
text: 'jsonforms.oidc.buttons.text',
|
||||
icon: 'jsonforms.oidc.buttons.icon',
|
||||
variant: 'jsonforms.oidc.buttons.variant',
|
||||
style: 'jsonforms.oidc.buttons.style',
|
||||
sectionTitle: 'jsonforms.oidc.buttons.title',
|
||||
sectionDescription: 'jsonforms.oidc.buttons.description',
|
||||
},
|
||||
accordion: {
|
||||
basicConfiguration: 'jsonforms.oidc.accordion.basicConfiguration',
|
||||
advancedEndpoints: 'jsonforms.oidc.accordion.advancedEndpoints',
|
||||
authorizationRules: 'jsonforms.oidc.accordion.authorizationRules',
|
||||
buttonCustomization: 'jsonforms.oidc.accordion.buttonCustomization',
|
||||
},
|
||||
// Add missing keys for the form schema
|
||||
sso: {
|
||||
providers: 'jsonforms.sso.providers',
|
||||
defaultAllowedOrigins: 'jsonforms.sso.defaultAllowedOrigins',
|
||||
},
|
||||
} as const;
|
||||
|
||||
export interface OidcConfig {
|
||||
providers: OidcProvider[];
|
||||
defaultAllowedOrigins?: string[];
|
||||
@@ -592,6 +645,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
    default: [],
    description:
        'Additional trusted redirect origins to allow redirects from custom ports, reverse proxies, Tailscale, etc.',
    i18n: OIDC_I18N.sso.defaultAllowedOrigins,
};

// Add the control for defaultAllowedOrigins before the providers control using UnraidSettingsLayout
@@ -624,27 +678,32 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
properties: {
    id: {
        type: 'string',
        i18n: OIDC_I18N.provider.id,
        title: 'Provider ID',
        description: 'Unique identifier for the provider',
        pattern: '^[a-zA-Z0-9._-]+$',
    },
    name: {
        type: 'string',
        i18n: OIDC_I18N.provider.name,
        title: 'Provider Name',
        description: 'Display name for the provider',
    },
    clientId: {
        type: 'string',
        i18n: OIDC_I18N.provider.clientId,
        title: 'Client ID',
        description: 'OAuth2 client ID registered with the provider',
    },
    clientSecret: {
        type: 'string',
        i18n: OIDC_I18N.provider.clientSecret,
        title: 'Client Secret',
        description: 'OAuth2 client secret (if required)',
    },
    issuer: {
        type: 'string',
        i18n: OIDC_I18N.provider.issuer,
        title: 'Issuer URL',
        format: 'uri',
        allOf: [
@@ -669,6 +728,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
    { type: 'string', minLength: 1, format: 'uri' },
    { type: 'string', maxLength: 0 },
],
i18n: OIDC_I18N.provider.authorizationEndpoint,
title: 'Authorization Endpoint',
description: 'Optional - will be auto-discovered if not provided',
},
@@ -677,6 +737,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
    { type: 'string', minLength: 1, format: 'uri' },
    { type: 'string', maxLength: 0 },
],
i18n: OIDC_I18N.provider.tokenEndpoint,
title: 'Token Endpoint',
description: 'Optional - will be auto-discovered if not provided',
},
@@ -685,12 +746,14 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
    { type: 'string', minLength: 1, format: 'uri' },
    { type: 'string', maxLength: 0 },
],
i18n: OIDC_I18N.provider.jwksUri,
title: 'JWKS URI',
description: 'Optional - will be auto-discovered if not provided',
},
scopes: {
    type: 'array',
    items: { type: 'string' },
    i18n: OIDC_I18N.provider.scopes,
    title: 'Scopes',
    default: ['openid', 'profile', 'email'],
    description: 'OAuth2 scopes to request',
@@ -709,6 +772,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
allowedDomains: {
    type: 'array',
    items: { type: 'string' },
    i18n: OIDC_I18N.restrictions.allowedDomains,
    title: 'Allowed Email Domains',
    description:
        'Email domains that are allowed to login (e.g., company.com)',
@@ -716,6 +780,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
allowedEmails: {
    type: 'array',
    items: { type: 'string' },
    i18n: OIDC_I18N.restrictions.allowedEmails,
    title: 'Specific Email Addresses',
    description:
        'Specific email addresses that are allowed to login',
@@ -723,12 +788,14 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
allowedUserIds: {
    type: 'array',
    items: { type: 'string' },
    i18n: OIDC_I18N.restrictions.allowedUserIds,
    title: 'Allowed User IDs',
    description:
        'Specific user IDs (sub claim) that are allowed to login',
},
googleWorkspaceDomain: {
    type: 'string',
    i18n: OIDC_I18N.restrictions.workspaceDomain,
    title: 'Google Workspace Domain',
    description:
        'Restrict to users from a specific Google Workspace domain',
@@ -737,6 +804,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
},
authorizationRuleMode: {
    type: 'string',
    i18n: OIDC_I18N.rules.mode,
    title: 'Rule Mode',
    enum: ['or', 'and'],
    default: 'or',
@@ -750,29 +818,34 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
properties: {
    claim: {
        type: 'string',
        i18n: OIDC_I18N.rules.claim,
        title: 'Claim',
        description: 'JWT claim to check',
    },
    operator: {
        type: 'string',
        i18n: OIDC_I18N.rules.operator,
        title: 'Operator',
        enum: ['equals', 'contains', 'endsWith', 'startsWith'],
    },
    value: {
        type: 'array',
        items: { type: 'string' },
        i18n: OIDC_I18N.rules.value,
        title: 'Values',
        description: 'Values to match against',
    },
},
required: ['claim', 'operator', 'value'],
},
i18n: OIDC_I18N.rules.collection,
title: 'Claim Rules',
description:
    'Define authorization rules based on claims in the ID token. Rule mode can be configured: OR logic (any rule matches) or AND logic (all rules must match).',
},
buttonText: {
    type: 'string',
    i18n: OIDC_I18N.buttons.text,
    title: 'Button Text',
    description: 'Custom text for the login button',
},
@@ -781,11 +854,13 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
    { type: 'string', minLength: 1 },
    { type: 'string', maxLength: 0 },
],
i18n: OIDC_I18N.buttons.icon,
title: 'Button Icon URL',
description: 'URL or base64 encoded icon for the login button',
},
buttonVariant: {
    type: 'string',
    i18n: OIDC_I18N.buttons.variant,
    title: 'Button Style',
    enum: [
        'primary',
@@ -800,6 +875,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
},
buttonStyle: {
    type: 'string',
    i18n: OIDC_I18N.buttons.style,
    title: 'Custom CSS Styles',
    description:
        'Custom inline CSS styles for the button (e.g., "background: linear-gradient(to right, #4f46e5, #7c3aed); border-radius: 9999px;")',
@@ -809,6 +885,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
},
title: 'OIDC Providers',
description: 'Configure OpenID Connect providers for SSO authentication',
i18n: OIDC_I18N.sso.providers,
},
},
elements: [
@@ -835,6 +912,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
title: 'Unraid.net Provider',
description:
    'This is the built-in Unraid.net provider. Only authorization rules can be modified.',
i18n: OIDC_I18N.provider.unraidNet,
},
],
detail: createAccordionLayout({
@@ -846,6 +924,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
accordion: {
    title: 'Basic Configuration',
    description: 'Essential provider settings',
    i18n: OIDC_I18N.accordion.basicConfiguration,
},
},
rule: {
@@ -872,6 +951,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
        schema: { const: 'unraid.net' },
    },
},
i18nKey: OIDC_I18N.provider.id,
}),
createSimpleLabeledControl({
    scope: '#/properties/name',
@@ -888,6 +968,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
        schema: { const: 'unraid.net' },
    },
},
i18nKey: OIDC_I18N.provider.name,
}),
createSimpleLabeledControl({
    scope: '#/properties/clientId',
@@ -903,6 +984,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
        schema: { const: 'unraid.net' },
    },
},
i18nKey: OIDC_I18N.provider.clientId,
}),
createSimpleLabeledControl({
    scope: '#/properties/clientSecret',
@@ -919,6 +1001,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
        schema: { const: 'unraid.net' },
    },
},
i18nKey: OIDC_I18N.provider.clientSecret,
}),
createSimpleLabeledControl({
    scope: '#/properties/issuer',
@@ -935,6 +1018,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
        schema: { const: 'unraid.net' },
    },
},
i18nKey: OIDC_I18N.provider.issuer,
}),
createSimpleLabeledControl({
    scope: '#/properties/scopes',
@@ -952,6 +1036,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
        schema: { const: 'unraid.net' },
    },
},
i18nKey: OIDC_I18N.provider.scopes,
}),
],
},
@@ -962,6 +1047,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
title: 'Advanced Endpoints',
description:
    'Override auto-discovery settings (optional)',
i18n: OIDC_I18N.accordion.advancedEndpoints,
},
},
rule: {
@@ -979,6 +1065,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
controlOptions: {
    inputType: 'url',
},
i18nKey: OIDC_I18N.provider.authorizationEndpoint,
rule: {
    effect: RuleEffect.HIDE,
    condition: {
@@ -994,6 +1081,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
controlOptions: {
    inputType: 'url',
},
i18nKey: OIDC_I18N.provider.tokenEndpoint,
rule: {
    effect: RuleEffect.HIDE,
    condition: {
@@ -1009,6 +1097,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
controlOptions: {
    inputType: 'url',
},
i18nKey: OIDC_I18N.provider.jwksUri,
rule: {
    effect: RuleEffect.HIDE,
    condition: {
@@ -1025,6 +1114,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
accordion: {
    title: 'Authorization Rules',
    description: 'Configure who can access your server',
    i18n: OIDC_I18N.accordion.authorizationRules,
},
},
elements: [
@@ -1035,6 +1125,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
description:
    'Choose between simple presets or advanced rule configuration',
controlOptions: {},
i18nKey: OIDC_I18N.rules.mode,
}),
// Simple Authorization Fields (shown when mode is 'simple')
{
@@ -1055,6 +1146,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
    'Configure who can login using simple presets. At least one field must be configured.',
format: 'title',
},
i18n: OIDC_I18N.restrictions.sectionTitle,
},
createSimpleLabeledControl({
    scope: '#/properties/simpleAuthorization/properties/allowedDomains',
@@ -1066,6 +1158,8 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
inputType: 'text',
placeholder: 'company.com',
},
i18nKey:
    OIDC_I18N.restrictions.allowedDomains,
}),
createSimpleLabeledControl({
    scope: '#/properties/simpleAuthorization/properties/allowedEmails',
@@ -1077,6 +1171,8 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
inputType: 'email',
placeholder: 'user@example.com',
},
i18nKey:
    OIDC_I18N.restrictions.allowedEmails,
}),
createSimpleLabeledControl({
    scope: '#/properties/simpleAuthorization/properties/allowedUserIds',
@@ -1088,6 +1184,8 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
inputType: 'text',
placeholder: 'user-id-123',
},
i18nKey:
    OIDC_I18N.restrictions.allowedUserIds,
}),
// Google-specific field (shown only for Google providers)
{
@@ -1109,6 +1207,9 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
inputType: 'text',
placeholder: 'company.com',
},
i18nKey:
    OIDC_I18N.restrictions
        .workspaceDomain,
}),
],
},
@@ -1141,6 +1242,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
description:
    'Define authorization rules based on claims in the ID token. Rule mode can be configured: OR logic (any rule matches) or AND logic (all rules must match).',
},
i18n: OIDC_I18N.rules.sectionTitle,
},
createSimpleLabeledControl({
    scope: '#/properties/authorizationRuleMode',
@@ -1148,6 +1250,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
description:
    'How to evaluate multiple rules: OR (any rule passes) or AND (all rules must pass)',
controlOptions: {},
i18nKey: OIDC_I18N.rules.mode,
}),
{
    type: 'Control',
@@ -1168,6 +1271,8 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
inputType: 'text',
placeholder: 'email',
},
i18nKey:
    OIDC_I18N.rules.claim,
}),
createSimpleLabeledControl({
    scope: '#/properties/operator',
@@ -1175,6 +1280,8 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
description:
    'How to compare the claim value',
controlOptions: {},
i18nKey:
    OIDC_I18N.rules.operator,
}),
createSimpleLabeledControl({
    scope: '#/properties/value',
@@ -1187,9 +1294,12 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
placeholder:
    '@company.com',
},
i18nKey:
    OIDC_I18N.rules.value,
}),
],
},
i18n: OIDC_I18N.rules.collection,
},
},
],
@@ -1203,6 +1313,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
title: 'Button Customization',
description:
    'Customize the appearance of the login button',
i18n: OIDC_I18N.accordion.buttonCustomization,
},
},
rule: {
@@ -1221,6 +1332,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
inputType: 'text',
placeholder: 'Sign in with Provider',
},
i18nKey: OIDC_I18N.buttons.text,
}),
createSimpleLabeledControl({
    scope: '#/properties/buttonIcon',
@@ -1230,12 +1342,14 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
controlOptions: {
    inputType: 'url',
},
i18nKey: OIDC_I18N.buttons.icon,
}),
createSimpleLabeledControl({
    scope: '#/properties/buttonVariant',
    label: 'Button Style:',
    description: 'Visual style of the login button',
    controlOptions: {},
    i18nKey: OIDC_I18N.buttons.variant,
}),
createSimpleLabeledControl({
    scope: '#/properties/buttonStyle',
@@ -1247,6 +1361,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
placeholder:
    'background-color: #3b82f6; border-color: #3b82f6; color: white; transition: all 0.2s;',
},
i18nKey: OIDC_I18N.buttons.style,
}),
],
},
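Taken together, these hunks thread one translation key through both layers: each schema property gains an i18n field and each matching UI control gains an i18nKey, all pointing into the shared OIDC_I18N constant. A minimal sketch of the idea follows; the OIDC_I18N definition is not part of this diff, so the key strings below are hypothetical stand-ins.

// Sketch only: the real OIDC_I18N constant is not shown in this diff, and
// the key strings here are assumed, not copied from the source.
const OIDC_I18N = {
    provider: { id: 'oidc.provider.id', name: 'oidc.provider.name' },
} as const;

// Schema side: the property advertises its translation id via `i18n`.
const idProperty = {
    type: 'string',
    title: 'Provider ID',
    i18n: OIDC_I18N.provider.id,
};

// UI side: the matching control carries the same id via `i18nKey`, so a
// renderer can resolve translated title/description for both at once.
const idControl = {
    scope: '#/properties/id',
    i18nKey: OIDC_I18N.provider.id,
};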
@@ -10,29 +10,40 @@ export function createSimpleLabeledControl({
    description,
    controlOptions,
    rule,
    i18nKey,
}: {
    scope: string;
    label: string;
    description?: string;
    controlOptions?: ControlElement['options'];
    rule?: Rule;
    i18nKey?: string;
}): Layout {
    const labelElement = {
        type: 'Label',
        text: label,
        options: {
            description,
        },
    } as LabelElement;

    if (i18nKey) {
        (labelElement as any).i18n = i18nKey;
    }

    const controlElement = {
        type: 'Control',
        scope: scope,
        options: controlOptions,
    } as ControlElement;

    if (i18nKey) {
        (controlElement as any).i18n = i18nKey;
    }

    const layout: Layout = {
        type: 'VerticalLayout',
        elements: [
            {
                type: 'Label',
                text: label,
                options: {
                    description,
                },
            } as LabelElement,
            {
                type: 'Control',
                scope: scope,
                options: controlOptions,
            } as ControlElement,
        ],
        elements: [labelElement, controlElement],
    };

    // Add rule if provided
@@ -56,6 +67,7 @@ export function createLabeledControl({
    layoutType = 'UnraidSettingsLayout',
    rule,
    passScopeToLayout = false,
    i18nKey,
}: {
    scope: string;
    label: string;
@@ -66,19 +78,29 @@ export function createLabeledControl({
    layoutType?: 'UnraidSettingsLayout' | 'VerticalLayout' | 'HorizontalLayout';
    rule?: Rule;
    passScopeToLayout?: boolean;
    i18nKey?: string;
}): Layout {
    const elements: Array<LabelElement | ControlElement> = [
        {
            type: 'Label',
            text: label,
            options: { ...labelOptions, description },
        } as LabelElement,
        {
            type: 'Control',
            scope: scope,
            options: controlOptions,
        } as ControlElement,
    ];
    const labelElement = {
        type: 'Label',
        text: label,
        options: { ...labelOptions, description },
    } as LabelElement;

    if (i18nKey) {
        (labelElement as any).i18n = i18nKey;
    }

    const controlElement = {
        type: 'Control',
        scope: scope,
        options: controlOptions,
    } as ControlElement;

    if (i18nKey) {
        (controlElement as any).i18n = i18nKey;
    }

    const elements: Array<LabelElement | ControlElement> = [labelElement, controlElement];

    const layout: Layout = {
        type: layoutType,
@@ -113,6 +135,7 @@ export function createAccordionLayout({
    accordion?: {
        title?: string;
        description?: string;
        i18n?: string;
    };
};
}
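The refactor above pulls the inline Label/Control literals into named labelElement and controlElement variables precisely so the optional key can be stamped onto both generated elements. A hedged usage sketch of the updated helper; the scope and key string are illustrative, not taken from this diff:

// Hypothetical call site. When i18nKey is supplied, both the Label and the
// Control in the returned VerticalLayout carry it.
const nameRow = createSimpleLabeledControl({
    scope: '#/properties/name', // illustrative scope
    label: 'Provider Name:',
    description: 'Display name for the provider',
    controlOptions: { inputType: 'text' },
    i18nKey: 'oidc.provider.name', // assumed key string
});
// nameRow.elements[0] is the Label and nameRow.elements[1] the Control;
// each now exposes `.i18n === 'oidc.provider.name'`.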
@@ -0,0 +1,73 @@
import { beforeEach, describe, expect, it, vi } from 'vitest';

import { PluginManagementService } from '@app/unraid-api/plugin/plugin-management.service.js';

describe('PluginManagementService', () => {
    let service: PluginManagementService;
    let configStore: string[];
    let configService: {
        get: ReturnType<typeof vi.fn>;
        set: ReturnType<typeof vi.fn>;
    };
    let dependencyService: {
        npm: ReturnType<typeof vi.fn>;
        rebuildVendorArchive: ReturnType<typeof vi.fn>;
    };

    beforeEach(() => {
        configStore = ['unraid-api-plugin-connect', '@unraid/test-plugin'];
        configService = {
            get: vi.fn((key: string, defaultValue?: unknown) => {
                if (key === 'api.plugins') {
                    return configStore ?? defaultValue ?? [];
                }
                if (key === 'api') {
                    return { plugins: configStore ?? defaultValue ?? [] };
                }
                return defaultValue;
            }),
            set: vi.fn((key: string, value: unknown) => {
                if (key === 'api' && typeof value === 'object' && value !== null) {
                    // @ts-expect-error - value is an object
                    if (Array.isArray(value.plugins)) {
                        // @ts-expect-error - value is an object
                        configStore = [...value.plugins];
                    }
                }
                if (key === 'api.plugins' && Array.isArray(value)) {
                    configStore = [...value];
                }
            }),
        };
        dependencyService = {
            npm: vi.fn().mockResolvedValue(undefined),
            rebuildVendorArchive: vi.fn().mockResolvedValue(undefined),
        };

        service = new PluginManagementService(configService as never, dependencyService as never);
    });

    it('rebuilds vendor archive when removing unbundled plugins', async () => {
        await service.removePlugin('@unraid/test-plugin');

        expect(dependencyService.npm).toHaveBeenCalledWith('uninstall', '@unraid/test-plugin');
        expect(dependencyService.rebuildVendorArchive).toHaveBeenCalledTimes(1);
        expect(configStore).not.toContain('@unraid/test-plugin');
    });

    it('skips vendor archive when only bundled plugins are removed', async () => {
        await service.removePlugin('unraid-api-plugin-connect');

        expect(dependencyService.npm).not.toHaveBeenCalled();
        expect(dependencyService.rebuildVendorArchive).not.toHaveBeenCalled();
        expect(configStore).not.toContain('unraid-api-plugin-connect');
    });

    it('does not rebuild vendor archive when bypassing npm uninstall', async () => {
        await service.removePluginConfigOnly('@unraid/test-plugin');

        expect(dependencyService.npm).not.toHaveBeenCalled();
        expect(dependencyService.rebuildVendorArchive).not.toHaveBeenCalled();
        expect(configStore).not.toContain('@unraid/test-plugin');
    });
});
46
api/src/unraid-api/plugin/__test__/plugin.service.test.ts
Normal file
@@ -0,0 +1,46 @@
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import * as safeModeUtils from '@app/core/utils/safe-mode.js';
import { PluginService } from '@app/unraid-api/plugin/plugin.service.js';

type PluginServicePrivateApi = {
    plugins?: Promise<unknown>;
    importPlugins(): Promise<unknown>;
};

const PrivatePluginService = PluginService as unknown as PluginServicePrivateApi;

describe('PluginService.getPlugins safe mode handling', () => {
    beforeEach(() => {
        PrivatePluginService.plugins = undefined;
    });

    afterEach(() => {
        PrivatePluginService.plugins = undefined;
        vi.restoreAllMocks();
    });

    it('returns an empty array and skips imports when safe mode is enabled', async () => {
        const safeModeSpy = vi.spyOn(safeModeUtils, 'isSafeModeEnabled').mockReturnValue(true);
        const importSpy = vi
            .spyOn(PrivatePluginService, 'importPlugins')
            .mockResolvedValue([{ name: 'example', version: '1.0.0' }]);

        const plugins = await PluginService.getPlugins();

        expect(plugins).toEqual([]);
        expect(safeModeSpy).toHaveBeenCalledTimes(1);
        expect(importSpy).not.toHaveBeenCalled();
    });

    it('loads plugins when safe mode is disabled', async () => {
        const expected = [{ name: 'example', version: '1.0.0' }];
        vi.spyOn(safeModeUtils, 'isSafeModeEnabled').mockReturnValue(false);
        const importSpy = vi.spyOn(PrivatePluginService, 'importPlugins').mockResolvedValue(expected);

        const plugins = await PluginService.getPlugins();

        expect(plugins).toBe(expected);
        expect(importSpy).toHaveBeenCalledTimes(1);
    });
});
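These tests clear the private static plugins field in both beforeEach and afterEach because getPlugins() memoizes its promise; without the reset, one test's cached value would leak into the next. A condensed sketch of that caching pattern, with names shortened (this is not the real class):

// Illustration of the promise memoization being reset above.
class Cached {
    private static value: Promise<string[]> | undefined;

    static load(): Promise<string[]> {
        // The first call populates the cache; every later call reuses it,
        // which is why the tests set the field back to undefined.
        Cached.value ??= Promise.resolve(['plugin-a']);
        return Cached.value;
    }
}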
@@ -35,8 +35,10 @@ export class PluginManagementService {
     */
    async removePlugin(...plugins: string[]) {
        const removed = this.removePluginFromConfig(...plugins);
        await this.uninstallPlugins(...removed);
        await this.dependencyService.rebuildVendorArchive();
        const { unbundledRemoved } = await this.uninstallPlugins(...removed);
        if (unbundledRemoved.length > 0) {
            await this.dependencyService.rebuildVendorArchive();
        }
    }

    /**
@@ -54,8 +56,7 @@ export class PluginManagementService {
            }
            pluginSet.add(plugin);
        });
        // @ts-expect-error - This is a valid config key
        this.configService.set<string[]>('api.plugins', Array.from(pluginSet));
        this.updatePluginsConfig(Array.from(pluginSet));
        return added;
    }

@@ -69,11 +70,15 @@ export class PluginManagementService {
        const pluginSet = new Set(this.plugins);
        const removed = plugins.filter((plugin) => pluginSet.delete(plugin));
        const pluginsArray = Array.from(pluginSet);
        // @ts-expect-error - This is a valid config key
        this.configService.set('api.plugins', pluginsArray);
        this.updatePluginsConfig(pluginsArray);
        return removed;
    }

    private updatePluginsConfig(plugins: string[]) {
        const apiConfig = this.configService.get<ApiConfig>('api');
        this.configService.set('api', { ...apiConfig, plugins });
    }

    /**
     * Install bundle / unbundled plugins using npm or direct with the config.
     *
@@ -100,12 +105,15 @@ export class PluginManagementService {
    private async uninstallPlugins(...plugins: string[]) {
        const bundled = plugins.filter((plugin) => this.isBundled(plugin));
        const unbundled = plugins.filter((plugin) => !this.isBundled(plugin));

        if (unbundled.length > 0) {
            await this.dependencyService.npm('uninstall', ...unbundled);
        }
        if (bundled.length > 0) {
            await this.removeBundledPlugin(...bundled);
            await this.removePluginConfigOnly(...bundled);
        }

        return { bundledRemoved: bundled, unbundledRemoved: unbundled };
    }

    /**------------------------------------------------------------------------
@@ -125,7 +133,13 @@ export class PluginManagementService {
        return added;
    }

    async removeBundledPlugin(...plugins: string[]) {
    /**
     * Removes plugins from the config without touching npm (used for bundled/default bypass flow).
     *
     * @param plugins - The plugins to remove.
     * @returns The list of plugins removed from the config.
     */
    async removePluginConfigOnly(...plugins: string[]) {
        const removed = this.removePluginFromConfig(...plugins);
        return removed;
    }
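The net effect of the removePlugin change is that the vendor-archive rebuild now runs only when an unbundled plugin actually went through npm uninstall; bundled and config-only removals skip it, which is exactly what the new tests assert. A condensed sketch of the resulting flow, with the collaborators stubbed as parameters rather than injected:

// Condensed flow sketch; parameter names are illustrative.
type UninstallResult = { bundledRemoved: string[]; unbundledRemoved: string[] };

async function removePluginSketch(
    removedFromConfig: string[],
    uninstall: (...names: string[]) => Promise<UninstallResult>,
    rebuildVendorArchive: () => Promise<void>
): Promise<void> {
    const { unbundledRemoved } = await uninstall(...removedFromConfig);
    // Bundled plugins never touched node_modules, so rebuilding the vendor
    // archive for them would be wasted work.
    if (unbundledRemoved.length > 0) {
        await rebuildVendorArchive();
    }
}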
@@ -75,7 +75,7 @@ export class PluginResolver {
    })
    async removePlugin(@Args('input') input: PluginManagementInput): Promise<boolean> {
        if (input.bundled) {
            await this.pluginManagementService.removeBundledPlugin(...input.names);
            await this.pluginManagementService.removePluginConfigOnly(...input.names);
        } else {
            await this.pluginManagementService.removePlugin(...input.names);
        }
@@ -2,6 +2,7 @@ import { Injectable } from '@nestjs/common';

import type { ApiNestPluginDefinition } from '@app/unraid-api/plugin/plugin.interface.js';
import { pluginLogger } from '@app/core/log.js';
import { isSafeModeEnabled } from '@app/core/utils/safe-mode.js';
import { getPackageJson } from '@app/environment.js';
import { loadApiConfig } from '@app/unraid-api/config/api-config.module.js';
import { NotificationImportance } from '@app/unraid-api/graph/resolvers/notifications/notifications.model.js';
@@ -20,7 +21,16 @@ export class PluginService {
    private static plugins: Promise<Plugin[]> | undefined;

    static async getPlugins() {
        PluginService.plugins ??= PluginService.importPlugins();
        if (!PluginService.plugins) {
            if (isSafeModeEnabled()) {
                PluginService.logger.warn(
                    'Safe mode enabled (vars.ini); skipping API plugin discovery and load.'
                );
                PluginService.plugins = Promise.resolve([]);
            } else {
                PluginService.plugins = PluginService.importPlugins();
            }
        }
        return PluginService.plugins;
    }
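This rewrite replaces the one-line ??= memoization with an explicit gate: in safe mode the cached promise resolves to an empty list and importPlugins() is never invoked, and that cached empty result then persists for the process lifetime. A minimal sketch of the gate, with the two dependencies inlined as parameters for self-containment:

// Minimal sketch of the safe-mode gate; dependencies are parameters here.
let cachedPlugins: Promise<unknown[]> | undefined;

function getPluginsSketch(
    isSafeModeEnabled: () => boolean,
    importPlugins: () => Promise<unknown[]>
): Promise<unknown[]> {
    if (!cachedPlugins) {
        cachedPlugins = isSafeModeEnabled()
            ? Promise.resolve([]) // skip plugin discovery entirely in safe mode
            : importPlugins();
    }
    return cachedPlugins;
}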
@@ -1,52 +0,0 @@
import { Test } from '@nestjs/testing';

import { describe, expect, it, vi } from 'vitest';

import { ApiReportService } from '@app/unraid-api/cli/api-report.service.js';
import { RestService } from '@app/unraid-api/rest/rest.service.js';

// Mock external dependencies
vi.mock('@app/store/index.js', () => ({
    getters: {
        paths: vi.fn(() => ({
            'log-base': '/tmp/logs',
        })),
    },
}));

vi.mock('execa', () => ({
    execa: vi.fn().mockResolvedValue({ stdout: 'mocked output' }),
}));

describe('RestService Dependencies', () => {
    it('should resolve ApiReportService dependency successfully', async () => {
        const mockApiReportService = {
            generateReport: vi.fn().mockResolvedValue({ timestamp: new Date().toISOString() }),
        };

        const module = await Test.createTestingModule({
            providers: [
                RestService,
                {
                    provide: ApiReportService,
                    useValue: mockApiReportService,
                },
            ],
        }).compile();

        const restService = module.get<RestService>(RestService);
        expect(restService).toBeDefined();
        expect(restService).toBeInstanceOf(RestService);

        await module.close();
    });

    it('should fail gracefully when ApiReportService is missing', async () => {
        // This test ensures we get a clear error when dependencies are missing
        await expect(
            Test.createTestingModule({
                providers: [RestService],
            }).compile()
        ).rejects.toThrow(/ApiReportService/);
    });
});
@@ -1,84 +0,0 @@
import { CacheModule } from '@nestjs/cache-manager';
import { Test } from '@nestjs/testing';

import { CANONICAL_INTERNAL_CLIENT_TOKEN } from '@unraid/shared';
import { describe, expect, it, vi } from 'vitest';

import { ApiReportService } from '@app/unraid-api/cli/api-report.service.js';
import { LogService } from '@app/unraid-api/cli/log.service.js';
import { RestModule } from '@app/unraid-api/rest/rest.module.js';
import { RestService } from '@app/unraid-api/rest/rest.service.js';

// Mock external dependencies that cause issues in tests
vi.mock('@app/store/index.js', () => ({
    store: {
        getState: vi.fn(() => ({
            paths: {
                'log-base': '/tmp/logs',
                'auth-keys': '/tmp/auth-keys',
                config: '/tmp/config',
            },
            emhttp: {},
            dynamix: { notify: { path: '/tmp/notifications' } },
            registration: {},
        })),
        subscribe: vi.fn(() => vi.fn()), // Return unsubscribe function
    },
    getters: {
        paths: vi.fn(() => ({
            'log-base': '/tmp/logs',
            'auth-keys': '/tmp/auth-keys',
            config: '/tmp/config',
        })),
        dynamix: vi.fn(() => ({ notify: { path: '/tmp/notifications' } })),
        emhttp: vi.fn(() => ({})),
        registration: vi.fn(() => ({})),
    },
}));

vi.mock('@app/core/log.js', () => ({
    levels: ['trace', 'debug', 'info', 'warn', 'error', 'fatal'],
    apiLogger: {
        info: vi.fn(),
        warn: vi.fn(),
        error: vi.fn(),
        debug: vi.fn(),
    },
    pluginLogger: {
        info: vi.fn(),
        warn: vi.fn(),
        error: vi.fn(),
        debug: vi.fn(),
        trace: vi.fn(),
        fatal: vi.fn(),
    },
}));

vi.mock('execa', () => ({
    execa: vi.fn().mockResolvedValue({ stdout: 'mocked output' }),
}));

describe('RestModule Integration', () => {
    it('should compile with RestService having access to ApiReportService', async () => {
        const module = await Test.createTestingModule({
            imports: [CacheModule.register({ isGlobal: true }), RestModule],
        })
            // Override services that have complex dependencies for testing
            .overrideProvider(CANONICAL_INTERNAL_CLIENT_TOKEN)
            .useValue({ getClient: vi.fn() })
            .overrideProvider(LogService)
            .useValue({ error: vi.fn(), debug: vi.fn() })
            .compile();

        const restService = module.get<RestService>(RestService);
        const apiReportService = module.get<ApiReportService>(ApiReportService);

        expect(restService).toBeDefined();
        expect(apiReportService).toBeDefined();

        // Verify RestService has the injected ApiReportService
        expect(restService['apiReportService']).toBeDefined();

        await module.close();
    }, 10000);
});
@@ -1,132 +0,0 @@
import { Test } from '@nestjs/testing';

import { beforeEach, describe, expect, it, vi } from 'vitest';

import { ApiReportService } from '@app/unraid-api/cli/api-report.service.js';
import { RestService } from '@app/unraid-api/rest/rest.service.js';

const mockWriteFile = vi.fn();

vi.mock('node:fs/promises', () => ({
    writeFile: (...args: any[]) => mockWriteFile(...args),
    stat: vi.fn(),
}));

// Mock ApiReportService
const mockApiReportService = {
    generateReport: vi.fn(),
};

describe('RestService', () => {
    let restService: RestService;

    beforeEach(async () => {
        const module = await Test.createTestingModule({
            providers: [RestService, { provide: ApiReportService, useValue: mockApiReportService }],
        }).compile();

        restService = module.get<RestService>(RestService);

        // Clear mocks
        vi.clearAllMocks();
    });

    describe('saveApiReport', () => {
        it('should generate report using ApiReportService and save to file', async () => {
            const mockReport = {
                timestamp: '2023-01-01T00:00:00.000Z',
                connectionStatus: {
                    running: 'yes' as const,
                },
                system: {
                    id: 'test-uuid',
                    name: 'Test Server',
                    version: '6.12.0',
                    machineId: 'REDACTED',
                    manufacturer: 'Test Manufacturer',
                    model: 'Test Model',
                },
                connect: {
                    installed: true,
                    dynamicRemoteAccess: {
                        enabledType: 'STATIC',
                        runningType: 'STATIC',
                        error: null,
                    },
                },
                config: {
                    valid: true,
                    error: null,
                },
                services: {
                    cloud: { name: 'cloud', online: true },
                    minigraph: { name: 'minigraph', online: false },
                    allServices: [],
                },
                remote: {
                    apikey: 'REDACTED',
                    localApiKey: 'REDACTED',
                    accesstoken: 'REDACTED',
                    idtoken: 'REDACTED',
                    refreshtoken: 'REDACTED',
                    ssoSubIds: 'REDACTED',
                    allowedOrigins: 'REDACTED',
                    email: 'REDACTED',
                },
            };

            const reportPath = '/tmp/test-report.json';
            mockApiReportService.generateReport.mockResolvedValue(mockReport);
            mockWriteFile.mockResolvedValue(undefined);

            await restService.saveApiReport(reportPath);

            // Verify ApiReportService was called (defaults to API running)
            expect(mockApiReportService.generateReport).toHaveBeenCalledWith();

            // Verify file was written with correct content
            expect(mockWriteFile).toHaveBeenCalledWith(
                reportPath,
                JSON.stringify(mockReport, null, 2),
                'utf-8'
            );
        });

        it('should handle ApiReportService errors gracefully', async () => {
            const reportPath = '/tmp/test-report.json';
            const error = new Error('Report generation failed');
            mockApiReportService.generateReport.mockRejectedValue(error);

            // Should not throw error
            await restService.saveApiReport(reportPath);

            // Verify ApiReportService was called
            expect(mockApiReportService.generateReport).toHaveBeenCalled();

            // Verify file write was not called due to error
            expect(mockWriteFile).not.toHaveBeenCalled();
        });

        it('should handle file write errors gracefully', async () => {
            const mockReport = {
                timestamp: '2023-01-01T00:00:00.000Z',
                system: { name: 'Test' },
            };

            const reportPath = '/tmp/test-report.json';
            mockApiReportService.generateReport.mockResolvedValue(mockReport);
            mockWriteFile.mockRejectedValue(new Error('File write failed'));

            // Should not throw error
            await restService.saveApiReport(reportPath);

            // Verify both service and file operations were attempted
            expect(mockApiReportService.generateReport).toHaveBeenCalled();
            expect(mockWriteFile).toHaveBeenCalledWith(
                reportPath,
                JSON.stringify(mockReport, null, 2),
                'utf-8'
            );
        });
    });
});
@@ -34,7 +34,6 @@ describe('RestController', () => {
    {
        provide: RestService,
        useValue: {
            getLogs: vi.fn(),
            getCustomizationStream: vi.fn(),
        },
    },
Some files were not shown because too many files have changed in this diff.