Mirror of https://github.com/unraid/api.git (synced 2026-01-02 14:40:01 -06:00)

Compare commits: 4.24.0-bui… → test/syste… (87 commits)
Commit SHAs:

0d7bee2490, e5abbcbf90, 22bb548833, 7c0d42a5cb, f0cfdfc0b5, 6d9796a981, ca5e84f916, 719795647c,
4c2e212a03, 9ae3f3cec3, 071efeac45, 9ff64629cf, fa837db09f, 3462e7688d, dc7a449f3f, bec54e4feb,
a5e9b83374, 9253250dc5, 1d9c76f410, 33e88bc5f5, d4f90d6d64, b35da13234, 6d3d623b66, f6521d8c1c,
e5e77321da, 31af99e52f, 933cefa020, 375dcd0598, 64875edbba, 330e81a484, b8f0fdf8d2, 36c104915e,
dc9a036c73, c71b0487ad, e7340431a5, e4a9b8291b, 6b6b78fa2e, e2fdf6cadb, 3d4f193fa4, b28ef1ea33,
ee0f240233, 3aacaa1fb5, 0cd4c0ae16, 66625ded6a, f8a6785e9c, d7aca81c60, 854b403fbd, c264a1843c,
45cda4af80, 64eb9ce9b5, d56797c59f, 92af3b6115, 35f8bc2258, c4cd0c6352, 818e7ce997, 7e13202aa1,
d18eaf2364, 42406e795d, 11d2de5d08, 031c1ab5dc, 34075e44c5, ff2906e52a, a0d6cc92c8, 57acfaacf0,
ea816c7a5c, cafde72d38, 2b481c397c, 8c4e9dd7ae, f212dce88b, 8cd2a4c124, 10f048ee1f, e9e271ade5,
31c41027fc, fabe6a2c4b, 754966d5d3, ed594e9147, 50d83313a1, e57ec00627, 84f4a7221d, d73953f8ff,
0d165a6087, f4f3e3c44b, cd5eff11bc, 7bdeca8338, 661865f976, b7afaf4632, b3ca40c639
@@ -241,4 +241,3 @@ const pinia = createTestingPinia({
- Set initial state for focused testing
- Test computed properties by accessing them directly
- Verify state changes by updating the store
.github/workflows/build-artifacts.yml (vendored, new file — 201 lines)
@@ -0,0 +1,201 @@
name: Build Artifacts

on:
  workflow_call:
    inputs:
      ref:
        type: string
        required: false
        description: "Git ref to checkout (commit SHA, branch, or tag)"
      version_override:
        type: string
        required: false
        description: "Override version (for manual releases)"
    outputs:
      build_number:
        description: "Build number for the artifacts"
        value: ${{ jobs.build-api.outputs.build_number }}
    secrets:
      VITE_ACCOUNT:
        required: true
      VITE_CONNECT:
        required: true
      VITE_UNRAID_NET:
        required: true
      VITE_CALLBACK_KEY:
        required: true
      UNRAID_BOT_GITHUB_ADMIN_TOKEN:
        required: false

jobs:
  build-api:
    name: Build API
    runs-on: ubuntu-latest
    outputs:
      build_number: ${{ steps.buildnumber.outputs.build_number }}
    defaults:
      run:
        working-directory: api
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5
        with:
          ref: ${{ inputs.ref || github.ref }}
          fetch-depth: 0

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Install Node
        uses: actions/setup-node@v5
        with:
          node-version-file: ".nvmrc"
          cache: 'pnpm'

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0

      - name: PNPM Install
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile

      - name: Get Git Short Sha and API version
        id: vars
        run: |
          GIT_SHA=$(git rev-parse --short HEAD)
          IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
          PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
          API_VERSION=${{ inputs.version_override && format('"{0}"', inputs.version_override) || '${PACKAGE_LOCK_VERSION}' }}
          if [ -z "${{ inputs.version_override }}" ] && [ -z "$IS_TAGGED" ]; then
            API_VERSION="${PACKAGE_LOCK_VERSION}+${GIT_SHA}"
          fi
          export API_VERSION
          echo "API_VERSION=${API_VERSION}" >> $GITHUB_ENV
          echo "PACKAGE_LOCK_VERSION=${PACKAGE_LOCK_VERSION}" >> $GITHUB_OUTPUT

      - name: Generate build number
        id: buildnumber
        uses: onyxmueller/build-tag-number@v1
        with:
          token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN || github.token }}
          prefix: ${{ inputs.version_override || steps.vars.outputs.PACKAGE_LOCK_VERSION }}

      - name: Build
        run: |
          pnpm run build:release
          tar -czf deploy/unraid-api.tgz -C deploy/pack/ .

      - name: Upload tgz to Github artifacts
        uses: actions/upload-artifact@v4
        with:
          name: unraid-api
          path: ${{ github.workspace }}/api/deploy/unraid-api.tgz

  build-unraid-ui-webcomponents:
    name: Build Unraid UI Library (Webcomponent Version)
    defaults:
      run:
        working-directory: unraid-ui
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5
        with:
          ref: ${{ inputs.ref || github.ref }}

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Install Node
        uses: actions/setup-node@v5
        with:
          node-version-file: ".nvmrc"
          cache: 'pnpm'

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0

      - name: Install dependencies
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile --filter @unraid/ui

      - name: Lint
        run: pnpm run lint

      - name: Build
        run: pnpm run build:wc

      - name: Upload Artifact to Github
        uses: actions/upload-artifact@v4
        with:
          name: unraid-wc-ui
          path: unraid-ui/dist-wc/

  build-web:
    name: Build Web App
    defaults:
      run:
        working-directory: web
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5
        with:
          ref: ${{ inputs.ref || github.ref }}

      - name: Create env file
        run: |
          touch .env
          echo VITE_ACCOUNT=${{ secrets.VITE_ACCOUNT }} >> .env
          echo VITE_CONNECT=${{ secrets.VITE_CONNECT }} >> .env
          echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
          echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Install Node
        uses: actions/setup-node@v5
        with:
          node-version-file: ".nvmrc"
          cache: 'pnpm'

      - name: PNPM Install
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile --filter @unraid/web --filter @unraid/ui

      - name: Build Unraid UI
        run: |
          cd ${{ github.workspace }}/unraid-ui
          pnpm run build

      - name: Lint files
        run: pnpm run lint

      - name: Type Check
        run: pnpm run type-check

      - name: Build
        run: pnpm run build

      - name: Upload build to Github artifacts
        uses: actions/upload-artifact@v4
        with:
          name: unraid-wc-rich
          path: web/dist
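The "Get Git Short Sha and API version" step above mixes GitHub expression syntax with shell logic, which obscures the actual precedence. A minimal sketch of the effective resolution in plain bash, where `VERSION_OVERRIDE` stands in for the workflow's `version_override` input (the variable name is illustrative):

```bash
#!/usr/bin/env bash
# Sketch of the API version resolution used in build-artifacts.yml.
set -euo pipefail

GIT_SHA=$(git rev-parse --short HEAD)
IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match 2>/dev/null || echo '')
PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)

if [ -n "${VERSION_OVERRIDE:-}" ]; then
    API_VERSION="$VERSION_OVERRIDE"                     # manual release: trust the override
elif [ -n "$IS_TAGGED" ]; then
    API_VERSION="$PACKAGE_LOCK_VERSION"                 # tagged commit: clean semver
else
    API_VERSION="${PACKAGE_LOCK_VERSION}+${GIT_SHA}"    # dev build: semver plus build metadata
fi

echo "API_VERSION=${API_VERSION}"
```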
.github/workflows/build-plugin.yml (vendored — 28 changed lines)
@@ -27,6 +27,15 @@ on:
       type: string
       required: true
       description: "Build number for the plugin builds"
+      ref:
+        type: string
+        required: false
+        description: "Git ref (commit SHA, branch, or tag) to checkout"
+      TRIGGER_PRODUCTION_RELEASE:
+        type: boolean
+        required: false
+        default: false
+        description: "Whether to automatically trigger the release-production workflow (default: false)"
     secrets:
       CF_ACCESS_KEY_ID:
         required: true
@@ -49,6 +58,7 @@ jobs:
       - name: Checkout repo
        uses: actions/checkout@v5
        with:
+          ref: ${{ inputs.ref }}
          fetch-depth: 0

      - uses: pnpm/action-setup@v4
@@ -68,7 +78,21 @@ jobs:
          GIT_SHA=$(git rev-parse --short HEAD)
          IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
          PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
-          API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
+
+          # For release builds, trust the release tag version to avoid stale checkouts
+          if [ "${{ inputs.RELEASE_CREATED }}" = "true" ] && [ -n "${{ inputs.RELEASE_TAG }}" ]; then
+            TAG_VERSION="${{ inputs.RELEASE_TAG }}"
+            TAG_VERSION="${TAG_VERSION#v}" # trim leading v if present
+
+            if [ "$TAG_VERSION" != "$PACKAGE_LOCK_VERSION" ]; then
+              echo "::warning::Release tag version ($TAG_VERSION) does not match package.json version ($PACKAGE_LOCK_VERSION). Using tag version for TXZ naming."
+            fi
+
+            API_VERSION="$TAG_VERSION"
+          else
+            API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
+          fi

          echo "API_VERSION=${API_VERSION}" >> $GITHUB_OUTPUT

      - name: Install dependencies
@@ -136,7 +160,7 @@ jobs:
        done

      - name: Workflow Dispatch and wait
-        if: inputs.RELEASE_CREATED == 'true'
+        if: inputs.RELEASE_CREATED == 'true' && inputs.TRIGGER_PRODUCTION_RELEASE == true
        uses: the-actions-org/workflow-dispatch@v4.0.0
        with:
          workflow: release-production.yml
.github/workflows/claude-code-review.yml (vendored, deleted — 103 lines)
@@ -1,103 +0,0 @@
name: Claude Code Review

on:
  pull_request:
    types: [opened, synchronize]
    # Skip reviews for non-code changes
    paths-ignore:
      - "**/*.md"
      - "**/package-lock.json"
      - "**/pnpm-lock.yaml"
      - "**/.gitignore"
      - "**/LICENSE"
      - "**/*.config.js"
      - "**/*.config.ts"
      - "**/tsconfig.json"
      - "**/.github/workflows/*.yml"
      - "**/docs/**"

jobs:
  claude-review:
    # Skip review for bot PRs and WIP/skip-review PRs
    # Only run if changes are significant (>10 lines)
    if: |
      (github.event.pull_request.additions > 10 || github.event.pull_request.deletions > 10) &&
      !contains(github.event.pull_request.title, '[skip-review]') &&
      !contains(github.event.pull_request.title, '[WIP]') &&
      !endsWith(github.event.pull_request.user.login, '[bot]') &&
      github.event.pull_request.user.login != 'dependabot' &&
      github.event.pull_request.user.login != 'renovate'

    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: read
      issues: read
      id-token: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v5
        with:
          fetch-depth: 1

      - name: Run Claude Code Review
        id: claude-review
        uses: anthropics/claude-code-action@beta
        with:
          claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}

          # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4)
          # model: "claude-opus-4-20250514"

          # Direct prompt for automated review (no @claude mention needed)
          direct_prompt: |
            IMPORTANT: Review ONLY the DIFF/CHANGESET - the actual lines that were added or modified in this PR.
            DO NOT review the entire file context, only analyze the specific changes being made.

            Look for HIGH-PRIORITY issues in the CHANGED LINES ONLY:

            1. CRITICAL BUGS: Logic errors, null pointer issues, infinite loops, race conditions
            2. SECURITY: SQL injection, XSS, authentication bypass, exposed secrets, unsafe operations
            3. BREAKING CHANGES: API contract violations, removed exports, changed function signatures
            4. DATA LOSS RISKS: Destructive operations without safeguards, missing data validation

            DO NOT comment on:
            - Code that wasn't changed in this PR
            - Style, formatting, or documentation
            - Test coverage (unless tests are broken by the changes)
            - Minor optimizations or best practices
            - Existing code issues that weren't introduced by this PR

            If you find no critical issues in the DIFF, respond with: "✅ No critical issues found in changes"

            Keep response under 10 lines. Reference specific line numbers from the diff when reporting issues.

          # Optional: Use sticky comments to make Claude reuse the same comment on subsequent pushes to the same PR
          use_sticky_comment: true

          # Context-aware review based on PR characteristics
          # Uncomment to enable different review strategies based on context
          # direct_prompt: |
          #   ${{
          #     (github.event.pull_request.additions > 500) &&
          #     'Large PR detected. Focus only on architectural issues and breaking changes. Skip minor issues.' ||
          #     contains(github.event.pull_request.title, 'fix') &&
          #     'Bug fix PR: Verify the fix addresses the root cause and check for regression risks.' ||
          #     contains(github.event.pull_request.title, 'deps') &&
          #     'Dependency update: Check for breaking changes and security advisories only.' ||
          #     contains(github.event.pull_request.title, 'refactor') &&
          #     'Refactor PR: Verify no behavior changes and check for performance regressions.' ||
          #     contains(github.event.pull_request.title, 'feat') &&
          #     'New feature: Check for security issues, edge cases, and integration problems only.' ||
          #     'Standard review: Check for critical bugs, security issues, and breaking changes only.'
          #   }}

          # Optional: Add specific tools for running tests or linting
          # allowed_tools: "Bash(npm run test),Bash(npm run lint),Bash(npm run typecheck)"

          # Optional: Skip review for certain conditions
          # if: |
          #   !contains(github.event.pull_request.title, '[skip-review]') &&
          #   !contains(github.event.pull_request.title, '[WIP]')
.github/workflows/claude.yml (vendored, deleted — 64 lines)
@@ -1,64 +0,0 @@
name: Claude Code

on:
  issue_comment:
    types: [created]
  pull_request_review_comment:
    types: [created]
  issues:
    types: [opened, assigned]
  pull_request_review:
    types: [submitted]

jobs:
  claude:
    if: |
      (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
      (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: read
      issues: read
      id-token: write
      actions: read # Required for Claude to read CI results on PRs
    steps:
      - name: Checkout repository
        uses: actions/checkout@v5
        with:
          fetch-depth: 1

      - name: Run Claude Code
        id: claude
        uses: anthropics/claude-code-action@beta
        with:
          claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}

          # This is an optional setting that allows Claude to read CI results on PRs
          additional_permissions: |
            actions: read

          # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4)
          # model: "claude-opus-4-20250514"

          # Optional: Customize the trigger phrase (default: @claude)
          # trigger_phrase: "/claude"

          # Optional: Trigger when specific user is assigned to an issue
          # assignee_trigger: "claude-bot"

          # Optional: Allow Claude to run specific commands
          # allowed_tools: "Bash(npm install),Bash(npm run build),Bash(npm run test:*),Bash(npm run lint:*)"

          # Optional: Add custom instructions for Claude to customize its behavior for your project
          # custom_instructions: |
          #   Follow our coding standards
          #   Ensure all new code has tests
          #   Use TypeScript for new files

          # Optional: Custom environment variables for Claude
          # claude_env: |
          #   NODE_ENV: test
.github/workflows/generate-release-notes.yml (vendored, new file — 210 lines)
@@ -0,0 +1,210 @@
name: Generate Release Notes

on:
  workflow_call:
    inputs:
      version:
        description: 'Version number (e.g., 4.25.3)'
        required: true
        type: string
      target_commitish:
        description: 'Commit SHA or branch (leave empty for current HEAD)'
        required: false
        type: string
      release_notes:
        description: 'Custom release notes (leave empty to auto-generate)'
        required: false
        type: string
    outputs:
      release_notes:
        description: 'Generated or provided release notes'
        value: ${{ jobs.generate.outputs.release_notes }}
    secrets:
      UNRAID_BOT_GITHUB_ADMIN_TOKEN:
        required: true

jobs:
  generate:
    name: Generate Release Notes
    runs-on: ubuntu-latest
    outputs:
      release_notes: ${{ steps.generate_notes.outputs.release_notes }}
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5
        with:
          ref: ${{ inputs.target_commitish || github.ref }}
          fetch-depth: 0
          token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Generate Release Notes
        id: generate_notes
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          TAG_NAME="v${{ inputs.version }}"
          VERSION="${{ inputs.version }}"

          if [ -n "${{ inputs.release_notes }}" ]; then
            NOTES="${{ inputs.release_notes }}"
          else
            CHANGELOG_PATH="api/CHANGELOG.md"

            if [ -f "$CHANGELOG_PATH" ]; then
              echo "Extracting release notes from CHANGELOG.md for version ${VERSION}"

              NOTES=$(awk -v ver="$VERSION" '
                BEGIN {
                  found=0; capture=0; output="";
                  gsub(/\./, "\\.", ver);
                }
                /^## \[/ {
                  if (capture) exit;
                  if ($0 ~ "\\[" ver "\\]") {
                    found=1;
                    capture=1;
                  }
                }
                capture {
                  if (output != "") output = output "\n";
                  output = output $0;
                }
                END {
                  if (found) print output;
                  else exit 1;
                }
              ' "$CHANGELOG_PATH") || EXTRACTION_STATUS=$?

              if [ ${EXTRACTION_STATUS:-0} -eq 0 ] && [ -n "$NOTES" ]; then
                echo "✓ Found release notes in CHANGELOG.md"
              else
                echo "⚠ Version ${VERSION} not found in CHANGELOG.md, generating with conventional-changelog"

                PREV_TAG=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")
                CHANGELOG_GENERATED=false

                if [ -n "$PREV_TAG" ]; then
                  echo "Generating changelog from ${PREV_TAG}..HEAD using conventional-changelog"

                  npm install -g conventional-changelog-cli

                  TEMP_NOTES=$(mktemp)
                  conventional-changelog -p conventionalcommits \
                    --release-count 1 \
                    --output-unreleased \
                    > "$TEMP_NOTES" 2>/dev/null || true

                  if [ -s "$TEMP_NOTES" ]; then
                    NOTES=$(cat "$TEMP_NOTES")

                    if [ -n "$NOTES" ]; then
                      echo "✓ Generated changelog with conventional-changelog"
                      CHANGELOG_GENERATED=true

                      TEMP_CHANGELOG=$(mktemp)
                      {
                        if [ -f "$CHANGELOG_PATH" ]; then
                          head -n 1 "$CHANGELOG_PATH"
                          echo ""
                          echo "$NOTES"
                          echo ""
                          tail -n +2 "$CHANGELOG_PATH"
                        else
                          echo "# Changelog"
                          echo ""
                          echo "$NOTES"
                        fi
                      } > "$TEMP_CHANGELOG"

                      mv "$TEMP_CHANGELOG" "$CHANGELOG_PATH"
                      echo "✓ Updated CHANGELOG.md with generated notes"
                    else
                      echo "⚠ conventional-changelog produced empty output, using GitHub auto-generation"
                      NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
                        -f tag_name="${TAG_NAME}" \
                        -f target_commitish="${{ inputs.target_commitish || github.sha }}" \
                        -f previous_tag_name="${PREV_TAG}" \
                        --jq '.body')
                    fi
                  else
                    echo "⚠ conventional-changelog failed, using GitHub auto-generation"
                    NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
                      -f tag_name="${TAG_NAME}" \
                      -f target_commitish="${{ inputs.target_commitish || github.sha }}" \
                      -f previous_tag_name="${PREV_TAG}" \
                      --jq '.body')
                  fi

                  rm -f "$TEMP_NOTES"
                else
                  echo "⚠ No previous tag found, using GitHub auto-generation"
                  NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
                    -f tag_name="${TAG_NAME}" \
                    -f target_commitish="${{ inputs.target_commitish || github.sha }}" \
                    --jq '.body' || echo "Release ${VERSION}")
                fi

                if [ "$CHANGELOG_GENERATED" = true ]; then
                  BRANCH_OR_SHA="${{ inputs.target_commitish || github.ref }}"

                  if git show-ref --verify --quiet "refs/heads/${BRANCH_OR_SHA}"; then
                    echo ""
                    echo "=========================================="
                    echo "CHANGELOG GENERATED AND COMMITTED"
                    echo "=========================================="
                    echo ""

                    git config user.name "github-actions[bot]"
                    git config user.email "github-actions[bot]@users.noreply.github.com"

                    BEFORE_SHA=$(git rev-parse HEAD)

                    git add "$CHANGELOG_PATH"
                    git commit -m "chore: add changelog for version ${VERSION}"
                    git push origin "HEAD:${BRANCH_OR_SHA}"

                    AFTER_SHA=$(git rev-parse HEAD)

                    echo "✓ Changelog committed and pushed successfully"
                    echo ""
                    echo "Previous SHA: ${BEFORE_SHA}"
                    echo "New SHA: ${AFTER_SHA}"
                    echo ""
                    echo "⚠️ CRITICAL: A new commit was created, but github.sha is immutable."
                    echo "⚠️ github.sha = ${BEFORE_SHA} (original workflow trigger)"
                    echo "⚠️ The release tag must point to ${AFTER_SHA} (with changelog)"
                    echo ""
                    echo "Re-run this workflow to create the release with the correct commit."
                    echo ""
                    exit 1
                  else
                    echo "⚠ Target is a commit SHA, not a branch. Cannot push changelog updates."
                    echo "Changelog was generated but not committed."
                  fi
                fi
              fi
            else
              echo "⚠ CHANGELOG.md not found, using GitHub auto-generation"
              PREV_TAG=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")

              if [ -n "$PREV_TAG" ]; then
                NOTES=$(gh api repos/${{ github.repository }}/releases/generate-notes \
                  -f tag_name="${TAG_NAME}" \
                  -f target_commitish="${{ inputs.target_commitish || github.sha }}" \
                  -f previous_tag_name="${PREV_TAG}" \
                  --jq '.body')
              else
                NOTES="Release ${VERSION}"
              fi
            fi
          fi

          echo "release_notes<<EOF" >> $GITHUB_OUTPUT
          echo "$NOTES" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT
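The awk program in that workflow pulls a single version's section out of a Keep-a-Changelog-style file by capturing from its `## [x.y.z]` heading up to the next `## [` heading. A standalone check of that extraction, run against a throwaway changelog (the temp path and version numbers are illustrative):

```bash
#!/usr/bin/env bash
# Standalone demo of the changelog extraction used in generate-release-notes.yml.
cat > /tmp/CHANGELOG.md <<'EOF'
# Changelog

## [4.27.2](https://example.invalid/compare/v4.27.1...v4.27.2) (2025-11-21)

### Bug Fixes

* issue with header flashing

## [4.27.1](https://example.invalid/compare/v4.27.0...v4.27.1) (2025-11-21)

### Bug Fixes

* missing translations
EOF

VERSION="4.27.2"
awk -v ver="$VERSION" '
  BEGIN { found=0; capture=0; output=""; gsub(/\./, "\\.", ver); }
  /^## \[/ {
    if (capture) exit;                       # reached the next release heading: stop
    if ($0 ~ "\\[" ver "\\]") { found=1; capture=1; }
  }
  capture { if (output != "") output = output "\n"; output = output $0; }
  END { if (found) print output; else exit 1; }
' /tmp/CHANGELOG.md
# Prints only the "## [4.27.2] ..." section; exits 1 if the version is absent,
# which is what lets the workflow fall back to conventional-changelog.
```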
.github/workflows/main.yml (vendored — 197 changed lines)
@@ -154,173 +154,15 @@ jobs:
           files: ./coverage/coverage-final.json,../web/coverage/coverage-final.json,../unraid-ui/coverage/coverage-final.json,../packages/unraid-api-plugin-connect/coverage/coverage-final.json,../packages/unraid-shared/coverage/coverage-final.json
           fail_ci_if_error: false

-  build-api:
-    name: Build API
-    runs-on: ubuntu-latest
-    outputs:
-      build_number: ${{ steps.buildnumber.outputs.build_number }}
-    defaults:
-      run:
-        working-directory: api
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@v5
-
-      - uses: pnpm/action-setup@v4
-        name: Install pnpm
-        with:
-          run_install: false
-
-      - name: Install Node
-        uses: actions/setup-node@v5
-        with:
-          node-version-file: ".nvmrc"
-          cache: 'pnpm'
-
-      - name: Cache APT Packages
-        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
-        with:
-          packages: bash procps python3 libvirt-dev jq zstd git build-essential
-          version: 1.0
-
-      - name: PNPM Install
-        run: |
-          cd ${{ github.workspace }}
-          pnpm install --frozen-lockfile
-
-      - name: Build
-        run: pnpm run build
-
-      - name: Get Git Short Sha and API version
-        id: vars
-        run: |
-          GIT_SHA=$(git rev-parse --short HEAD)
-          IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
-          PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
-          API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
-          export API_VERSION
-          echo "API_VERSION=${API_VERSION}" >> $GITHUB_ENV
-          echo "PACKAGE_LOCK_VERSION=${PACKAGE_LOCK_VERSION}" >> $GITHUB_OUTPUT
-
-      - name: Generate build number
-        id: buildnumber
-        uses: onyxmueller/build-tag-number@v1
-        with:
-          token: ${{secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN}}
-          prefix: ${{steps.vars.outputs.PACKAGE_LOCK_VERSION}}
-
-      - name: Build
-        run: |
-          pnpm run build:release
-          tar -czf deploy/unraid-api.tgz -C deploy/pack/ .
-
-      - name: Upload tgz to Github artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          name: unraid-api
-          path: ${{ github.workspace }}/api/deploy/unraid-api.tgz
-
-  build-unraid-ui-webcomponents:
-    name: Build Unraid UI Library (Webcomponent Version)
-    defaults:
-      run:
-        working-directory: unraid-ui
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@v5
-
-      - uses: pnpm/action-setup@v4
-        name: Install pnpm
-        with:
-          run_install: false
-
-      - name: Install Node
-        uses: actions/setup-node@v5
-        with:
-          node-version-file: ".nvmrc"
-          cache: 'pnpm'
-
-      - name: Cache APT Packages
-        uses: awalsh128/cache-apt-pkgs-action@v1.5.3
-        with:
-          packages: bash procps python3 libvirt-dev jq zstd git build-essential
-          version: 1.0
-
-      - name: Install dependencies
-        run: |
-          cd ${{ github.workspace }}
-          pnpm install --frozen-lockfile --filter @unraid/ui
-
-      - name: Lint
-        run: pnpm run lint
-
-      - name: Build
-        run: pnpm run build:wc
-
-      - name: Upload Artifact to Github
-        uses: actions/upload-artifact@v4
-        with:
-          name: unraid-wc-ui
-          path: unraid-ui/dist-wc/
-
-  build-web:
-    # needs: [build-unraid-ui]
-    name: Build Web App
-    defaults:
-      run:
-        working-directory: web
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@v5
-
-      - name: Create env file
-        run: |
-          touch .env
-          echo VITE_ACCOUNT=${{ secrets.VITE_ACCOUNT }} >> .env
-          echo VITE_CONNECT=${{ secrets.VITE_CONNECT }} >> .env
-          echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
-          echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env
-
-      - uses: pnpm/action-setup@v4
-        name: Install pnpm
-        with:
-          run_install: false
-
-      - name: Install Node
-        uses: actions/setup-node@v5
-        with:
-          node-version-file: ".nvmrc"
-          cache: 'pnpm'
-
-      - name: PNPM Install
-        run: |
-          cd ${{ github.workspace }}
-          pnpm install --frozen-lockfile --filter @unraid/web --filter @unraid/ui
-
-      - name: Build Unraid UI
-        run: |
-          cd ${{ github.workspace }}/unraid-ui
-          pnpm run build
-
-      - name: Lint files
-        run: pnpm run lint
-
-      - name: Type Check
-        run: pnpm run type-check
-
-      - name: Test
-        run: pnpm run test:ci
-
-      - name: Build
-        run: pnpm run build
-
-      - name: Upload build to Github artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          name: unraid-wc-rich
-          path: web/dist
+  build-artifacts:
+    name: Build All Artifacts
+    uses: ./.github/workflows/build-artifacts.yml
+    secrets:
+      VITE_ACCOUNT: ${{ secrets.VITE_ACCOUNT }}
+      VITE_CONNECT: ${{ secrets.VITE_CONNECT }}
+      VITE_UNRAID_NET: ${{ secrets.VITE_UNRAID_NET }}
+      VITE_CALLBACK_KEY: ${{ secrets.VITE_CALLBACK_KEY }}
+      UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

   release-please:
     name: Release Please
@@ -329,15 +171,15 @@ jobs:
     if: github.event_name == 'push' && github.ref == 'refs/heads/main'
     needs:
       - test-api
-      - build-api
-      - build-web
-      - build-unraid-ui-webcomponents
+      - build-artifacts
     permissions:
       contents: write
       pull-requests: write
     steps:
       - name: Checkout
         uses: actions/checkout@v5
         with:
           fetch-depth: 0

       - id: release
         uses: googleapis/release-please-action@v4
@@ -348,17 +190,15 @@ jobs:
   build-plugin-staging-pr:
     name: Build and Deploy Plugin
     needs:
-      - build-api
-      - build-web
-      - build-unraid-ui-webcomponents
+      - build-artifacts
       - test-api
     uses: ./.github/workflows/build-plugin.yml
     with:
-      RELEASE_CREATED: false
+      RELEASE_CREATED: 'false'
       TAG: ${{ github.event.pull_request.number && format('PR{0}', github.event.pull_request.number) || '' }}
       BUCKET_PATH: ${{ github.event.pull_request.number && format('unraid-api/tag/PR{0}', github.event.pull_request.number) || 'unraid-api' }}
       BASE_URL: "https://preview.dl.unraid.net/unraid-api"
-      BUILD_NUMBER: ${{ needs.build-api.outputs.build_number }}
+      BUILD_NUMBER: ${{ needs.build-artifacts.outputs.build_number }}
     secrets:
       CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
       CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
@@ -370,15 +210,16 @@ jobs:
     name: Build and Deploy Production Plugin
     needs:
       - release-please
-      - build-api
+      - build-artifacts
     uses: ./.github/workflows/build-plugin.yml
     with:
-      RELEASE_CREATED: true
+      RELEASE_CREATED: 'true'
       RELEASE_TAG: ${{ needs.release-please.outputs.tag_name }}
       TAG: ""
       BUCKET_PATH: unraid-api
       BASE_URL: "https://stable.dl.unraid.net/unraid-api"
-      BUILD_NUMBER: ${{ needs.build-api.outputs.build_number }}
+      BUILD_NUMBER: ${{ needs.build-artifacts.outputs.build_number }}
+      TRIGGER_PRODUCTION_RELEASE: true
     secrets:
       CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
       CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
.github/workflows/manual-release.yml (vendored, new file — 239 lines)
@@ -0,0 +1,239 @@
name: Manual Release

on:
  workflow_dispatch:
    inputs:
      version:
        description: 'Version to release (e.g., 4.25.3)'
        required: true
        type: string
      target_commitish:
        description: 'Commit SHA or branch (leave empty for current HEAD)'
        required: false
        type: string
      release_notes:
        description: 'Release notes/changelog (leave empty to auto-generate from commits)'
        required: false
        type: string
      prerelease:
        description: 'Mark as prerelease'
        required: false
        type: boolean
        default: false

permissions:
  contents: write
  pull-requests: write

jobs:
  validate-version:
    name: Validate and Update Package Versions
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5
        with:
          ref: ${{ inputs.target_commitish || github.ref }}
          fetch-depth: 0
          token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Check and Update Package Versions
        run: |
          EXPECTED_VERSION="${{ inputs.version }}"
          MISMATCHES_FOUND=false

          PACKAGE_JSONS=(
            "package.json"
            "api/package.json"
            "web/package.json"
            "unraid-ui/package.json"
            "plugin/package.json"
            "packages/unraid-shared/package.json"
            "packages/unraid-api-plugin-health/package.json"
            "packages/unraid-api-plugin-generator/package.json"
            "packages/unraid-api-plugin-connect/package.json"
          )

          echo "Checking package.json versions against expected version: ${EXPECTED_VERSION}"

          for pkg in "${PACKAGE_JSONS[@]}"; do
            if [ -f "$pkg" ]; then
              CURRENT_VERSION=$(node -p "require('./$pkg').version")
              if [ "$CURRENT_VERSION" != "$EXPECTED_VERSION" ]; then
                echo "❌ Version mismatch in $pkg: $CURRENT_VERSION != $EXPECTED_VERSION"
                MISMATCHES_FOUND=true

                # Detect indentation by checking the first property line
                INDENT_SPACES=$(head -10 "$pkg" | grep '^ *"' | head -1 | sed 's/".*//g' | wc -c)
                INDENT_SPACES=$((INDENT_SPACES - 1))

                jq --indent "$INDENT_SPACES" --arg version "$EXPECTED_VERSION" '.version = $version' "$pkg" > "$pkg.tmp" && mv "$pkg.tmp" "$pkg"
                echo "✓ Updated $pkg to version $EXPECTED_VERSION"
              else
                echo "✓ $pkg version matches: $CURRENT_VERSION"
              fi
            fi
          done

          if [ "$MISMATCHES_FOUND" = true ]; then
            echo ""
            echo "=========================================="
            echo "Version mismatches found!"
            echo "=========================================="
            echo ""

            BRANCH_OR_SHA="${{ inputs.target_commitish || github.ref }}"

            if git show-ref --verify --quiet "refs/heads/${BRANCH_OR_SHA}"; then
              echo "Creating commit with version updates and pushing to branch: ${BRANCH_OR_SHA}"

              git config user.name "github-actions[bot]"
              git config user.email "github-actions[bot]@users.noreply.github.com"

              BEFORE_SHA=$(git rev-parse HEAD)

              git add ${PACKAGE_JSONS[@]}
              git commit -m "chore: update package versions to ${{ inputs.version }}"
              git push origin "HEAD:${BRANCH_OR_SHA}"

              AFTER_SHA=$(git rev-parse HEAD)

              echo ""
              echo "=========================================="
              echo "WORKFLOW MUST BE RE-RUN"
              echo "=========================================="
              echo ""
              echo "✓ Version updates committed and pushed successfully"
              echo ""
              echo "Previous SHA: ${BEFORE_SHA}"
              echo "New SHA: ${AFTER_SHA}"
              echo ""
              echo "⚠️ CRITICAL: A new commit was created, but github.sha is immutable."
              echo "⚠️ github.sha = ${BEFORE_SHA} (original workflow trigger)"
              echo "⚠️ The release tag must point to ${AFTER_SHA} (with version updates)"
              echo ""
              echo "Re-run this workflow to create the release with the correct commit."
              echo ""
              exit 1
            else
              echo "Target is a commit SHA, not a branch. Cannot push version updates."
              echo "Please update the package.json versions manually and re-run the workflow."
              exit 1
            fi
          fi

          echo ""
          echo "✓ All package.json versions match the expected version: ${EXPECTED_VERSION}"

  build-artifacts:
    name: Build All Artifacts
    needs:
      - validate-version
    uses: ./.github/workflows/build-artifacts.yml
    with:
      ref: ${{ inputs.target_commitish || github.ref }}
      version_override: ${{ inputs.version }}
    secrets:
      VITE_ACCOUNT: ${{ secrets.VITE_ACCOUNT }}
      VITE_CONNECT: ${{ secrets.VITE_CONNECT }}
      VITE_UNRAID_NET: ${{ secrets.VITE_UNRAID_NET }}
      VITE_CALLBACK_KEY: ${{ secrets.VITE_CALLBACK_KEY }}
      UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

  generate-release-notes:
    name: Generate Release Notes
    needs:
      - build-artifacts
    uses: ./.github/workflows/generate-release-notes.yml
    with:
      version: ${{ inputs.version }}
      target_commitish: ${{ inputs.target_commitish || github.ref }}
      release_notes: ${{ inputs.release_notes }}
    secrets:
      UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

  create-release:
    name: Create GitHub Release (Draft)
    runs-on: ubuntu-latest
    needs:
      - generate-release-notes
    outputs:
      tag_name: ${{ steps.create_release.outputs.tag_name }}
      release_notes: ${{ needs.generate-release-notes.outputs.release_notes }}
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5
        with:
          ref: ${{ inputs.target_commitish || github.ref }}
          fetch-depth: 0

      - name: Create or Update Release as Draft
        id: create_release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          TAG_NAME="v${{ inputs.version }}"
          TARGET="${{ inputs.target_commitish || github.sha }}"

          echo "tag_name=${TAG_NAME}" >> $GITHUB_OUTPUT

          if gh release view "${TAG_NAME}" > /dev/null 2>&1; then
            echo "Release ${TAG_NAME} already exists, updating as draft..."
            gh release edit "${TAG_NAME}" \
              --draft \
              --notes "${{ needs.generate-release-notes.outputs.release_notes }}" \
              ${{ inputs.prerelease && '--prerelease' || '' }}
          else
            echo "Creating new draft release ${TAG_NAME}..."
            git tag "${TAG_NAME}" "${TARGET}" || true
            git push origin "${TAG_NAME}" || true

            gh release create "${TAG_NAME}" \
              --draft \
              --title "${{ inputs.version }}" \
              --notes "${{ needs.generate-release-notes.outputs.release_notes }}" \
              --target "${TARGET}" \
              ${{ inputs.prerelease && '--prerelease' || '' }}
          fi

  build-plugin-production:
    name: Build and Deploy Production Plugin
    needs:
      - create-release
      - build-artifacts
    uses: ./.github/workflows/build-plugin.yml
    with:
      RELEASE_CREATED: 'true'
      RELEASE_TAG: ${{ needs.create-release.outputs.tag_name }}
      TAG: ""
      BUCKET_PATH: unraid-api
      BASE_URL: "https://stable.dl.unraid.net/unraid-api"
      BUILD_NUMBER: ${{ needs.build-artifacts.outputs.build_number }}
      ref: ${{ inputs.target_commitish || github.ref }}
    secrets:
      CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
      CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
      CF_BUCKET_PREVIEW: ${{ secrets.CF_BUCKET_PREVIEW }}
      CF_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
      UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

  publish-release:
    name: Publish Release
    runs-on: ubuntu-latest
    needs:
      - create-release
      - build-plugin-production
    steps:
      - name: Publish Release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          TAG_NAME="${{ needs.create-release.outputs.tag_name }}"
          echo "Publishing release ${TAG_NAME}..."
          gh release edit "${TAG_NAME}" --draft=false --repo ${{ github.repository }}
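The version-sync step above rewrites each package.json with jq, first sniffing the file's indentation so the rewrite doesn't churn formatting. A standalone sketch of that bump on a single file (the target path and version are illustrative):

```bash
#!/usr/bin/env bash
# Sketch of the indentation-preserving version bump from manual-release.yml.
set -euo pipefail

pkg="api/package.json"          # illustrative target
EXPECTED_VERSION="4.27.2"

# Detect indentation: count the leading spaces on the first property line.
# sed strips everything from the first quote onward; wc -c counts the
# remaining spaces plus a newline, hence the -1.
INDENT_SPACES=$(head -10 "$pkg" | grep '^ *"' | head -1 | sed 's/".*//g' | wc -c)
INDENT_SPACES=$((INDENT_SPACES - 1))

# Rewrite .version in place, keeping the detected indent width.
jq --indent "$INDENT_SPACES" --arg version "$EXPECTED_VERSION" \
   '.version = $version' "$pkg" > "$pkg.tmp" && mv "$pkg.tmp" "$pkg"
```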
.github/workflows/publish-schema.yml (vendored, new file — 30 lines)
@@ -0,0 +1,30 @@
name: Publish GraphQL Schema

on:
  push:
    branches:
      - main
    paths:
      - 'api/generated-schema.graphql'
  workflow_dispatch:

jobs:
  publish-schema:
    name: Publish Schema to Apollo Studio
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v5

      - name: Install Apollo Rover CLI
        run: |
          curl -sSL https://rover.apollo.dev/nix/latest | sh
          echo "$HOME/.rover/bin" >> $GITHUB_PATH

      - name: Publish schema to Apollo Studio
        env:
          APOLLO_KEY: ${{ secrets.APOLLO_KEY }}
        run: |
          rover graph publish Unraid-API@current \
            --schema api/generated-schema.graphql
.gitignore (vendored — 3 lines added)
@@ -123,3 +123,6 @@ api/dev/Unraid.net/myservers.cfg
 # local Mise settings
 .mise.toml

+# Compiled test pages (generated from Nunjucks templates)
+web/public/test-pages/*.html
@@ -1 +1 @@
-{".":"4.24.0"}
+{".":"4.27.2"}
@@ -1,7 +1,8 @@
 @custom-variant dark (&:where(.dark, .dark *));

 /* Utility defaults for web components (when we were using shadow DOM) */
-:host {
+:host,
+.unapi {
   --tw-divide-y-reverse: 0;
   --tw-border-style: solid;
   --tw-font-weight: initial;
@@ -61,7 +62,7 @@
 }
 */

-body {
+.unapi {
   --color-alpha: #1c1b1b;
   --color-beta: #f2f2f2;
   --color-gamma: #999999;
@@ -73,13 +74,14 @@ body {
   --ring-shadow: 0 0 var(--color-beta);
 }

-button:not(:disabled),
-[role='button']:not(:disabled) {
+.unapi button:not(:disabled),
+.unapi [role='button']:not(:disabled) {
   cursor: pointer;
 }

 /* Font size overrides for SSO button component */
-unraid-sso-button {
+.unapi unraid-sso-button,
+unraid-sso-button.unapi {
   --text-xs: 0.75rem;
   --text-sm: 0.875rem;
   --text-base: 1rem;
@@ -93,4 +95,4 @@ unraid-sso-button {
   --text-7xl: 4.5rem;
   --text-8xl: 6rem;
   --text-9xl: 8rem;
-}
+}
@@ -5,13 +5,7 @@
  */

 /* Default/White Theme */
-:root,
-.theme-white {
-  --header-text-primary: #ffffff;
-  --header-text-secondary: #999999;
-  --header-background-color: #1c1b1b;
-  --header-gradient-start: rgba(28, 27, 27, 0);
-  --header-gradient-end: rgba(28, 27, 27, 0.7);
+.Theme--white {
   --color-border: #383735;
   --color-alpha: #ff8c2f;
   --color-beta: #1c1b1b;
@@ -20,13 +14,8 @@
 }

 /* Black Theme */
-.theme-black,
-.theme-black.dark {
-  --header-text-primary: #1c1b1b;
-  --header-text-secondary: #999999;
-  --header-background-color: #f2f2f2;
-  --header-gradient-start: rgba(242, 242, 242, 0);
-  --header-gradient-end: rgba(242, 242, 242, 0.7);
+.Theme--black,
+.Theme--black.dark {
   --color-border: #e0e0e0;
   --color-alpha: #ff8c2f;
   --color-beta: #f2f2f2;
@@ -35,12 +24,7 @@
 }

 /* Gray Theme */
-.theme-gray {
-  --header-text-primary: #ffffff;
-  --header-text-secondary: #999999;
-  --header-background-color: #1c1b1b;
-  --header-gradient-start: rgba(28, 27, 27, 0);
-  --header-gradient-end: rgba(28, 27, 27, 0.7);
+.Theme--gray {
   --color-border: #383735;
   --color-alpha: #ff8c2f;
   --color-beta: #383735;
@@ -49,12 +33,7 @@
 }

 /* Azure Theme */
-.theme-azure {
-  --header-text-primary: #1c1b1b;
-  --header-text-secondary: #999999;
-  --header-background-color: #f2f2f2;
-  --header-gradient-start: rgba(242, 242, 242, 0);
-  --header-gradient-end: rgba(242, 242, 242, 0.7);
+.Theme--azure {
   --color-border: #5a8bb8;
   --color-alpha: #ff8c2f;
   --color-beta: #e7f2f8;
@@ -66,27 +45,3 @@
 .dark {
   --color-border: #383735;
 }
-
-/*
- * Dynamic color variables for user overrides from GraphQL
- * These are set via JavaScript and override the theme defaults
- * Using :root with class for higher specificity to override theme classes
- */
-:root.has-custom-header-text {
-  --header-text-primary: var(--custom-header-text-primary);
-  --color-header-text-primary: var(--custom-header-text-primary);
-}
-
-:root.has-custom-header-meta {
-  --header-text-secondary: var(--custom-header-text-secondary);
-  --color-header-text-secondary: var(--custom-header-text-secondary);
-}
-
-:root.has-custom-header-bg {
-  --header-background-color: var(--custom-header-background-color);
-  --color-header-background: var(--custom-header-background-color);
-  --header-gradient-start: var(--custom-header-gradient-start);
-  --header-gradient-end: var(--custom-header-gradient-end);
-  --color-header-gradient-start: var(--custom-header-gradient-start);
-  --color-header-gradient-end: var(--custom-header-gradient-end);
-}
@@ -14,6 +14,9 @@ This is the Unraid API monorepo containing multiple packages that provide API fu

 ## Essential Commands

+pnpm does not use `--` to pass additional arguments.
+For example, to target a specific test file, `pnpm test <file>` is sufficient.
+
 ### Development

 ```bash
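That pnpm note is easy to trip over coming from npm; a quick contrast (the test file path is illustrative):

```bash
# pnpm forwards extra arguments to the underlying script directly:
pnpm test src/store/server.test.ts

# npm would need the `--` separator for the same effect:
npm test -- src/store/server.test.ts
```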
@@ -32,3 +32,4 @@ CHOKIDAR_USEPOLLING=true
 LOG_TRANSPORT=console
 LOG_LEVEL=trace
 ENABLE_NEXT_DOCKER_RELEASE=true
+SKIP_CONNECT_PLUGIN_CHECK=true
@@ -42,7 +42,10 @@ export default tseslint.config(
         'ignorePackages',
         {
           js: 'always',
+          ts: 'always',
+          mjs: 'always',
+          cjs: 'always',
           ts: 'never',
           tsx: 'never',
         },
       ],
       'no-restricted-globals': [
api/CHANGELOG.md (107 lines added)
@@ -1,5 +1,112 @@
# Changelog

## [4.27.2](https://github.com/unraid/api/compare/v4.27.1...v4.27.2) (2025-11-21)

### Bug Fixes

* issue with header flashing + issue with trial date ([64875ed](https://github.com/unraid/api/commit/64875edbba786a0d1ba0113c9e9a3d38594eafcc))

## [4.27.1](https://github.com/unraid/api/compare/v4.27.0...v4.27.1) (2025-11-21)

### Bug Fixes

* missing translations for expiring trials ([#1800](https://github.com/unraid/api/issues/1800)) ([36c1049](https://github.com/unraid/api/commit/36c104915ece203a3cac9e1a13e0c325e536a839))
* resolve header flash when background color is set ([#1796](https://github.com/unraid/api/issues/1796)) ([dc9a036](https://github.com/unraid/api/commit/dc9a036c73d8ba110029364e0d044dc24c7d0dfa))

## [4.27.0](https://github.com/unraid/api/compare/v4.26.2...v4.27.0) (2025-11-19)

### Features

* remove Unraid API log download functionality ([#1793](https://github.com/unraid/api/issues/1793)) ([e4a9b82](https://github.com/unraid/api/commit/e4a9b8291b049752a9ff59b17ff50cf464fe0535))

### Bug Fixes

* auto-uninstallation of connect api plugin ([#1791](https://github.com/unraid/api/issues/1791)) ([e734043](https://github.com/unraid/api/commit/e7340431a58821ec1b4f5d1b452fba6613b01fa5))

## [4.26.2](https://github.com/unraid/api/compare/v4.26.1...v4.26.2) (2025-11-19)

### Bug Fixes

* **theme:** Missing header background color ([e2fdf6c](https://github.com/unraid/api/commit/e2fdf6cadbd816559b8c82546c2bc771a81ffa9e))

## [4.26.1](https://github.com/unraid/api/compare/v4.26.0...v4.26.1) (2025-11-18)

### Bug Fixes

* **theme:** update theme class naming and scoping logic ([b28ef1e](https://github.com/unraid/api/commit/b28ef1ea334cb4842f01fa992effa7024185c6c9))

## [4.26.0](https://github.com/unraid/api/compare/v4.25.3...v4.26.0) (2025-11-17)

### Features

* add cpu power query & subscription ([#1745](https://github.com/unraid/api/issues/1745)) ([d7aca81](https://github.com/unraid/api/commit/d7aca81c60281bfa47fb9113929c1ead6ed3361b))
* add schema publishing to apollo studio ([#1772](https://github.com/unraid/api/issues/1772)) ([7e13202](https://github.com/unraid/api/commit/7e13202aa1c02803095bb72bb1bcb2472716f53a))
* add workflow_dispatch trigger to schema publishing workflow ([818e7ce](https://github.com/unraid/api/commit/818e7ce997059663e07efcf1dab706bf0d7fc9da))
* apollo studio readme link ([c4cd0c6](https://github.com/unraid/api/commit/c4cd0c63520deec15d735255f38811f0360fe3a1))
* **cli:** make `unraid-api plugins remove` scriptable ([#1774](https://github.com/unraid/api/issues/1774)) ([64eb9ce](https://github.com/unraid/api/commit/64eb9ce9b5d1ff4fb1f08d9963522c5d32221ba7))
* use persisted theme css to fix flashes on header ([#1784](https://github.com/unraid/api/issues/1784)) ([854b403](https://github.com/unraid/api/commit/854b403fbd85220a3012af58ce033cf0b8418516))

### Bug Fixes

* **api:** decode html entities before parsing notifications ([#1768](https://github.com/unraid/api/issues/1768)) ([42406e7](https://github.com/unraid/api/commit/42406e795da1e5b95622951a467722dde72d51a8))
* **connect:** disable api plugin if unraid plugin is absent ([#1773](https://github.com/unraid/api/issues/1773)) ([c264a18](https://github.com/unraid/api/commit/c264a1843cf115e8cc1add1ab4f12fdcc932405a))
* detection of flash backup activation state ([#1769](https://github.com/unraid/api/issues/1769)) ([d18eaf2](https://github.com/unraid/api/commit/d18eaf2364e0c04992c52af38679ff0a0c570440))
* re-add missing header gradient styles ([#1787](https://github.com/unraid/api/issues/1787)) ([f8a6785](https://github.com/unraid/api/commit/f8a6785e9c92f81acaef76ac5eb78a4a769e69da))
* respect OS safe mode in plugin loader ([#1775](https://github.com/unraid/api/issues/1775)) ([92af3b6](https://github.com/unraid/api/commit/92af3b61156cabae70368cf5222a2f7ac5b4d083))

## [4.25.3](https://github.com/unraid/unraid-api/compare/v4.25.2...v4.25.3) (2025-10-22)

### Bug Fixes

* flaky watch on boot drive's dynamix config ([ec7aa06](https://github.com/unraid/unraid-api/commit/ec7aa06d4a5fb1f0e84420266b0b0d7ee09a3663))

## [4.25.2](https://github.com/unraid/api/compare/v4.25.1...v4.25.2) (2025-09-30)

### Bug Fixes

* enhance activation code modal visibility logic ([#1733](https://github.com/unraid/api/issues/1733)) ([e57ec00](https://github.com/unraid/api/commit/e57ec00627e54ce76d903fd0fa8686ad02b393f3))

## [4.25.1](https://github.com/unraid/api/compare/v4.25.0...v4.25.1) (2025-09-30)

### Bug Fixes

* add cache busting to web component extractor ([#1731](https://github.com/unraid/api/issues/1731)) ([0d165a6](https://github.com/unraid/api/commit/0d165a608740505bdc505dcf69fb615225969741))
* Connect won't appear within Apps - Previous Apps ([#1727](https://github.com/unraid/api/issues/1727)) ([d73953f](https://github.com/unraid/api/commit/d73953f8ff3d7425c0aed32d16236ededfd948e1))

## [4.25.0](https://github.com/unraid/api/compare/v4.24.1...v4.25.0) (2025-09-26)

### Features

* add Tailwind scoping plugin and integrate into Vite config ([#1722](https://github.com/unraid/api/issues/1722)) ([b7afaf4](https://github.com/unraid/api/commit/b7afaf463243b073e1ab1083961a16a12ac6c4a3))
* notification filter controls pill buttons ([#1718](https://github.com/unraid/api/issues/1718)) ([661865f](https://github.com/unraid/api/commit/661865f97611cf802f239fde8232f3109281dde6))

### Bug Fixes

* enable auth guard for nested fields - thanks [@ingel81](https://github.com/ingel81) ([7bdeca8](https://github.com/unraid/api/commit/7bdeca8338a3901f15fde06fd7aede3b0c16e087))
* enhance user context validation in auth module ([#1726](https://github.com/unraid/api/issues/1726)) ([cd5eff1](https://github.com/unraid/api/commit/cd5eff11bcb4398581472966cb7ec124eac7ad0a))

## [4.24.1](https://github.com/unraid/api/compare/v4.24.0...v4.24.1) (2025-09-23)

### Bug Fixes

* cleanup leftover removed packages on upgrade ([#1719](https://github.com/unraid/api/issues/1719)) ([9972a5f](https://github.com/unraid/api/commit/9972a5f178f9a251e6c129d85c5f11cfd25e6281))
* enhance version comparison logic in installation script ([d9c561b](https://github.com/unraid/api/commit/d9c561bfebed0c553fe4bfa26b088ae71ca59755))
* issue with incorrect permissions on viewer / other roles ([378cdb7](https://github.com/unraid/api/commit/378cdb7f102f63128dd236c13f1a3745902d5a2c))

## [4.24.0](https://github.com/unraid/api/compare/v4.23.1...v4.24.0) (2025-09-18)
@@ -71,6 +71,10 @@ unraid-api report -vv

 If you found this file you're likely a developer. If you'd like to know more about the API and when it's available please join [our discord](https://discord.unraid.net/).

+## Internationalization
+
+- Run `pnpm --filter @unraid/api i18n:extract` to scan the Nest.js source for translation helper usages and update `src/i18n/en.json` with any new keys. The extractor keeps existing translations intact and appends new keys with their English source text.
+
 ## License

 Copyright Lime Technology Inc. All rights reserved.
@@ -1,5 +1,5 @@
 {
-  "version": "4.22.2",
+  "version": "4.27.2",
   "extraOrigins": [],
   "sandbox": true,
   "ssoSubIds": [],
@@ -0,0 +1,6 @@
timestamp=1730937600
event=Hashtag Test
subject=Warning [UNRAID] - #1 OS is cooking
description=Disk 1 temperature has reached #epic # levels of proportion
importance=warning
@@ -0,0 +1,6 @@
timestamp=1730937600
event=Temperature Test
subject=Warning [UNRAID] - High disk temperature detected: 45 °C
description=Disk 1 temperature has reached 45 °C (threshold: 40 °C)<br><br>Current temperatures:<br>Parity - 32 °C [OK]<br>Disk 1 - 45 °C [WARNING]<br>Disk 2 - 38 °C [OK]<br>Cache - 28 °C [OK]<br><br>Please check cooling system.
importance=warning
PM2 ecosystem config (deleted — 22 lines)
@@ -1,22 +0,0 @@
{
  "$schema": "https://json.schemastore.org/pm2-ecosystem",
  "apps": [
    {
      "name": "unraid-api",
      "script": "./dist/main.js",
      "cwd": "/usr/local/unraid-api",
      "exec_mode": "fork",
      "wait_ready": true,
      "listen_timeout": 15000,
      "max_restarts": 10,
      "min_uptime": 10000,
      "watch": false,
      "interpreter": "/usr/local/bin/node",
      "ignore_watch": ["node_modules", "src", ".env.*", "myservers.cfg"],
      "out_file": "/var/log/graphql-api.log",
      "error_file": "/var/log/graphql-api.log",
      "merge_logs": true,
      "kill_timeout": 10000
    }
  ]
}
@@ -1391,6 +1391,19 @@ type CpuLoad {
   percentSteal: Float!
 }

+type CpuPackages implements Node {
+  id: PrefixedID!
+
+  """Total CPU package power draw (W)"""
+  totalPower: Float!
+
+  """Power draw per package (W)"""
+  power: [Float!]!
+
+  """Temperature per package (°C)"""
+  temp: [Float!]!
+}
+
 type CpuUtilization implements Node {
   id: PrefixedID!
@@ -1454,6 +1467,12 @@ type InfoCpu implements Node {

   """CPU feature flags"""
   flags: [String!]

+  """
+  Per-package array of core/thread pairs, e.g. [[[0,1],[2,3]], [[4,5],[6,7]]]
+  """
+  topology: [[[Int!]!]!]!
+  packages: CpuPackages!
 }

 type MemoryLayout implements Node {
@@ -1654,8 +1673,8 @@ type PackageVersions {
   """npm version"""
   npm: String

-  """pm2 version"""
-  pm2: String
+  """nodemon version"""
+  nodemon: String

   """Git version"""
   git: String
@@ -2642,6 +2661,7 @@ type Subscription {
   arraySubscription: UnraidArray!
   logFile(path: String!): LogFileContent!
   systemMetricsCpu: CpuUtilization!
+  systemMetricsCpuTelemetry: CpuPackages!
   systemMetricsMemory: MemoryUtilization!
   upsUpdates: UPSDevice!
 }
@@ -1257,7 +1257,7 @@ type Versions {
   openssl: String
   perl: String
   php: String
-  pm2: String
+  nodemon: String
   postfix: String
   postgresql: String
   python: String
api/nodemon.json (Normal file, 17 lines)

@@ -0,0 +1,17 @@
{
    "watch": [
        "dist/main.js"
    ],
    "ignore": [
        "node_modules",
        "src",
        ".env.*"
    ],
    "exec": "node $UNRAID_API_SERVER_ENTRYPOINT",
    "signal": "SIGTERM",
    "ext": "js,json",
    "restartable": "rs",
    "env": {
        "NODE_ENV": "production"
    }
}
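A sketch of how this config might be consumed; the spawn call is an assumption based on the `exec` field above (nodemon expands the variable from the environment it runs in) and the `NODEMON_PATH` / `NODEMON_CONFIG_PATH` / `UNRAID_API_SERVER_ENTRYPOINT` constants added to `environment.ts` later in this diff:

```ts
import { spawn } from 'node:child_process';

import { NODEMON_CONFIG_PATH, NODEMON_PATH, UNRAID_API_SERVER_ENTRYPOINT } from '@app/environment.js';

// Hypothetical launcher: nodemon's --config flag points at the file above, and
// the "exec" field resolves $UNRAID_API_SERVER_ENTRYPOINT from the child env.
spawn(process.execPath, [NODEMON_PATH, '--config', NODEMON_CONFIG_PATH], {
    env: { ...process.env, UNRAID_API_SERVER_ENTRYPOINT },
    stdio: 'inherit',
});
```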
@@ -1,6 +1,6 @@
{
    "name": "@unraid/api",
    "version": "4.24.0",
    "version": "4.27.2",
    "main": "src/cli/index.ts",
    "type": "module",
    "corepack": {

@@ -30,6 +30,8 @@
    "// GraphQL Codegen": "",
    "codegen": "graphql-codegen --config codegen.ts",
    "codegen:watch": "graphql-codegen --config codegen.ts --watch",
    "// Internationalization": "",
    "i18n:extract": "node ./scripts/extract-translations.mjs",
    "// Code Quality": "",
    "lint": "eslint --config .eslintrc.ts src/",
    "lint:fix": "eslint --fix --config .eslintrc.ts src/",

@@ -114,6 +116,7 @@
    "graphql-subscriptions": "3.0.0",
    "graphql-tag": "2.12.6",
    "graphql-ws": "6.0.6",
    "html-entities": "^2.6.0",
    "ini": "5.0.0",
    "ip": "2.0.1",
    "jose": "6.0.13",

@@ -126,6 +129,7 @@
    "nestjs-pino": "4.4.0",
    "node-cache": "5.1.2",
    "node-window-polyfill": "1.0.4",
    "nodemon": "3.1.10",
    "openid-client": "6.6.4",
    "p-retry": "7.0.0",
    "passport-custom": "1.1.1",

@@ -134,7 +138,7 @@
    "pino": "9.9.0",
    "pino-http": "10.5.0",
    "pino-pretty": "13.1.1",
    "pm2": "6.0.8",
    "proper-lockfile": "^4.1.2",
    "reflect-metadata": "^0.1.14",
    "rxjs": "7.8.2",
    "semver": "7.7.2",

@@ -185,6 +189,7 @@
    "@types/mustache": "4.2.6",
    "@types/node": "22.18.0",
    "@types/pify": "6.1.0",
    "@types/proper-lockfile": "^4.1.4",
    "@types/semver": "7.7.0",
    "@types/sendmail": "1.4.7",
    "@types/stoppable": "1.1.3",

@@ -200,7 +205,6 @@
    "eslint-plugin-no-relative-import-paths": "1.6.1",
    "eslint-plugin-prettier": "5.5.4",
    "jiti": "2.5.1",
    "nodemon": "3.1.10",
    "prettier": "3.6.2",
    "rollup-plugin-node-externals": "8.1.0",
    "supertest": "7.1.4",

@@ -7,7 +7,7 @@ import { exit } from 'process';
import type { PackageJson } from 'type-fest';
import { $, cd } from 'zx';

import { getDeploymentVersion } from './get-deployment-version.js';
import { getDeploymentVersion } from '@app/../scripts/get-deployment-version.js';

type ApiPackageJson = PackageJson & {
    version: string;

@@ -94,7 +94,7 @@ try {

    await writeFile('./deploy/pack/package.json', JSON.stringify(parsedPackageJson, null, 4));
    // Copy necessary files to the pack directory
    await $`cp -r dist README.md .env.* ecosystem.config.json ./deploy/pack/`;
    await $`cp -r dist README.md .env.* nodemon.json ./deploy/pack/`;

    // Change to the pack directory and install dependencies
    cd('./deploy/pack');
api/scripts/extract-translations.mjs (Normal file, 162 lines)

@@ -0,0 +1,162 @@
#!/usr/bin/env node

import { readFile, writeFile } from 'node:fs/promises';
import path from 'node:path';
import { glob } from 'glob';
import ts from 'typescript';

const projectRoot = process.cwd();
const sourcePatterns = 'src/**/*.{ts,js}';
const ignorePatterns = [
    '**/__tests__/**',
    '**/__test__/**',
    '**/*.spec.ts',
    '**/*.spec.js',
    '**/*.test.ts',
    '**/*.test.js',
];

const englishLocaleFile = path.resolve(projectRoot, 'src/i18n/en.json');

const identifierTargets = new Set(['t', 'translate']);
const propertyTargets = new Set([
    'i18n.t',
    'i18n.translate',
    'ctx.t',
    'this.translate',
    'this.i18n.translate',
    'this.i18n.t',
]);

function getPropertyChain(node) {
    if (ts.isIdentifier(node)) {
        return node.text;
    }
    if (ts.isPropertyAccessExpression(node)) {
        const left = getPropertyChain(node.expression);
        if (!left) return undefined;
        return `${left}.${node.name.text}`;
    }
    return undefined;
}

function extractLiteral(node) {
    if (ts.isStringLiteralLike(node)) {
        return node.text;
    }
    if (ts.isNoSubstitutionTemplateLiteral(node)) {
        return node.text;
    }
    return undefined;
}

function collectKeysFromSource(sourceFile) {
    const keys = new Set();

    function visit(node) {
        if (ts.isCallExpression(node)) {
            const expr = node.expression;
            let matches = false;

            if (ts.isIdentifier(expr) && identifierTargets.has(expr.text)) {
                matches = true;
            } else if (ts.isPropertyAccessExpression(expr)) {
                const chain = getPropertyChain(expr);
                if (chain && propertyTargets.has(chain)) {
                    matches = true;
                }
            }

            if (matches) {
                const [firstArg] = node.arguments;
                if (firstArg) {
                    const literal = extractLiteral(firstArg);
                    if (literal) {
                        keys.add(literal);
                    }
                }
            }
        }

        ts.forEachChild(node, visit);
    }

    visit(sourceFile);
    return keys;
}

async function loadEnglishCatalog() {
    try {
        const raw = await readFile(englishLocaleFile, 'utf8');
        const parsed = raw.trim() ? JSON.parse(raw) : {};
        if (typeof parsed !== 'object' || Array.isArray(parsed)) {
            throw new Error('English locale file must contain a JSON object.');
        }
        return parsed;
    } catch (error) {
        if (error && error.code === 'ENOENT') {
            return {};
        }
        throw error;
    }
}

async function ensureEnglishCatalog(keys) {
    const existingCatalog = await loadEnglishCatalog();
    const existingKeys = new Set(Object.keys(existingCatalog));

    let added = 0;
    const combinedKeys = new Set([...existingKeys, ...keys]);
    const sortedKeys = Array.from(combinedKeys).sort((a, b) => a.localeCompare(b));
    const nextCatalog = {};

    for (const key of sortedKeys) {
        if (Object.prototype.hasOwnProperty.call(existingCatalog, key)) {
            nextCatalog[key] = existingCatalog[key];
        } else {
            nextCatalog[key] = key;
            added += 1;
        }
    }

    const nextJson = `${JSON.stringify(nextCatalog, null, 2)}\n`;
    const existingJson = JSON.stringify(existingCatalog, null, 2) + '\n';

    if (nextJson !== existingJson) {
        await writeFile(englishLocaleFile, nextJson, 'utf8');
    }

    return added;
}

async function main() {
    const files = await glob(sourcePatterns, {
        cwd: projectRoot,
        ignore: ignorePatterns,
        absolute: true,
    });

    const collectedKeys = new Set();

    await Promise.all(
        files.map(async (file) => {
            const content = await readFile(file, 'utf8');
            const sourceFile = ts.createSourceFile(file, content, ts.ScriptTarget.Latest, true);
            const keys = collectKeysFromSource(sourceFile);
            keys.forEach((key) => collectedKeys.add(key));
        }),
    );

    const added = await ensureEnglishCatalog(collectedKeys);

    if (added === 0) {
        console.log('[i18n] No new backend translation keys detected.');
    } else {
        console.log(`[i18n] Added ${added} key(s) to src/i18n/en.json.`);
    }
}

main().catch((error) => {
    console.error('[i18n] Failed to extract backend translations.', error);
    process.exitCode = 1;
});
@@ -4,23 +4,18 @@ import {
    getBannerPathIfPresent,
    getCasePathIfPresent,
} from '@app/core/utils/images/image-file-helpers.js';
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
import { store } from '@app/store/index.js';
import { loadDynamixConfig } from '@app/store/index.js';

test('get case path returns expected result', async () => {
    await expect(getCasePathIfPresent()).resolves.toContain('/dev/dynamix/case-model.png');
});

test('get banner path returns null (state unloaded)', async () => {
    await expect(getBannerPathIfPresent()).resolves.toMatchInlineSnapshot('null');
});

test('get banner path returns the banner (state loaded)', async () => {
    await store.dispatch(loadDynamixConfigFile()).unwrap();
    loadDynamixConfig();
    await expect(getBannerPathIfPresent()).resolves.toContain('/dev/dynamix/banner.png');
});

test('get banner path returns null when no banner (state loaded)', async () => {
    await store.dispatch(loadDynamixConfigFile()).unwrap();
    loadDynamixConfig();
    await expect(getBannerPathIfPresent('notabanner.png')).resolves.toMatchInlineSnapshot('null');
});

@@ -1,5 +0,0 @@
/* eslint-disable no-undef */
// Dummy process for PM2 testing
setInterval(() => {
    // Keep process alive
}, 1000);
@@ -1,222 +0,0 @@
import { existsSync } from 'node:fs';
import { homedir } from 'node:os';
import { join } from 'node:path';
import { fileURLToPath } from 'node:url';

import { execa } from 'execa';
import pm2 from 'pm2';
import { afterAll, afterEach, beforeAll, describe, expect, it } from 'vitest';

import { isUnraidApiRunning } from '@app/core/utils/pm2/unraid-api-running.js';

const __dirname = fileURLToPath(new URL('.', import.meta.url));
const PROJECT_ROOT = join(__dirname, '../../../../..');
const DUMMY_PROCESS_PATH = join(__dirname, 'dummy-process.js');
const CLI_PATH = join(PROJECT_ROOT, 'dist/cli.js');
const TEST_PROCESS_NAME = 'test-unraid-api';

// Shared PM2 connection state
let pm2Connected = false;

// Helper to ensure PM2 connection is established
async function ensurePM2Connection() {
    if (pm2Connected) return;

    return new Promise<void>((resolve, reject) => {
        pm2.connect((err) => {
            if (err) {
                reject(err);
                return;
            }
            pm2Connected = true;
            resolve();
        });
    });
}

// Helper to delete specific test processes (lightweight, reuses connection)
async function deleteTestProcesses() {
    if (!pm2Connected) {
        // No connection, nothing to clean up
        return;
    }

    const deletePromise = new Promise<void>((resolve) => {
        // Delete specific processes we might have created
        const processNames = ['unraid-api', TEST_PROCESS_NAME];
        let deletedCount = 0;

        const deleteNext = () => {
            if (deletedCount >= processNames.length) {
                resolve();
                return;
            }

            const processName = processNames[deletedCount];
            pm2.delete(processName, () => {
                // Ignore errors, process might not exist
                deletedCount++;
                deleteNext();
            });
        };

        deleteNext();
    });

    const timeoutPromise = new Promise<void>((resolve) => {
        setTimeout(() => resolve(), 3000); // 3 second timeout
    });

    return Promise.race([deletePromise, timeoutPromise]);
}

// Helper to ensure PM2 is completely clean (heavy cleanup with daemon kill)
async function cleanupAllPM2Processes() {
    // First delete test processes if we have a connection
    if (pm2Connected) {
        await deleteTestProcesses();
    }

    return new Promise<void>((resolve) => {
        // Always connect fresh for daemon kill (in case we weren't connected)
        pm2.connect((err) => {
            if (err) {
                // If we can't connect, assume PM2 is not running
                pm2Connected = false;
                resolve();
                return;
            }

            // Kill the daemon to ensure fresh state
            pm2.killDaemon(() => {
                pm2.disconnect();
                pm2Connected = false;
                // Small delay to let PM2 fully shutdown
                setTimeout(resolve, 500);
            });
        });
    });
}

describe.skipIf(!!process.env.CI)('PM2 integration tests', () => {
    beforeAll(async () => {
        // Set PM2_HOME to use home directory for testing (not /var/log)
        process.env.PM2_HOME = join(homedir(), '.pm2');

        // Build the CLI if it doesn't exist (only for CLI tests)
        if (!existsSync(CLI_PATH)) {
            console.log('Building CLI for integration tests...');
            try {
                await execa('pnpm', ['build'], {
                    cwd: PROJECT_ROOT,
                    stdio: 'inherit',
                    timeout: 120000, // 2 minute timeout for build
                });
            } catch (error) {
                console.error('Failed to build CLI:', error);
                throw new Error(
                    'Cannot run CLI integration tests without built CLI. Run `pnpm build` first.'
                );
            }
        }

        // Only do a full cleanup once at the beginning
        await cleanupAllPM2Processes();
    }, 150000); // 2.5 minute timeout for setup

    afterAll(async () => {
        // Only do a full cleanup once at the end
        await cleanupAllPM2Processes();
    });

    afterEach(async () => {
        // Lightweight cleanup after each test - just delete our test processes
        await deleteTestProcesses();
    }, 5000); // 5 second timeout for cleanup

    describe('isUnraidApiRunning function', () => {
        it('should return false when PM2 is not running the unraid-api process', async () => {
            const result = await isUnraidApiRunning();
            expect(result).toBe(false);
        });

        it('should return true when PM2 has unraid-api process running', async () => {
            // Ensure PM2 connection
            await ensurePM2Connection();

            // Start a dummy process with the name 'unraid-api'
            await new Promise<void>((resolve, reject) => {
                pm2.start(
                    {
                        script: DUMMY_PROCESS_PATH,
                        name: 'unraid-api',
                    },
                    (startErr) => {
                        if (startErr) return reject(startErr);
                        resolve();
                    }
                );
            });

            // Give PM2 time to start the process
            await new Promise((resolve) => setTimeout(resolve, 2000));

            const result = await isUnraidApiRunning();
            expect(result).toBe(true);
        }, 30000);

        it('should return false when unraid-api process is stopped', async () => {
            // Ensure PM2 connection
            await ensurePM2Connection();

            // Start and then stop the process
            await new Promise<void>((resolve, reject) => {
                pm2.start(
                    {
                        script: DUMMY_PROCESS_PATH,
                        name: 'unraid-api',
                    },
                    (startErr) => {
                        if (startErr) return reject(startErr);

                        // Stop the process after starting
                        setTimeout(() => {
                            pm2.stop('unraid-api', (stopErr) => {
                                if (stopErr) return reject(stopErr);
                                resolve();
                            });
                        }, 1000);
                    }
                );
            });

            await new Promise((resolve) => setTimeout(resolve, 1000));

            const result = await isUnraidApiRunning();
            expect(result).toBe(false);
        }, 30000);

        it('should handle PM2 connection errors gracefully', async () => {
            // Disconnect PM2 first to ensure we're testing fresh connection
            await new Promise<void>((resolve) => {
                pm2.disconnect();
                pm2Connected = false;
                setTimeout(resolve, 100);
            });

            // Set an invalid PM2_HOME to force connection failure
            const originalPM2Home = process.env.PM2_HOME;
            process.env.PM2_HOME = '/invalid/path/that/does/not/exist';

            const result = await isUnraidApiRunning();
            expect(result).toBe(false);

            // Restore original PM2_HOME
            if (originalPM2Home) {
                process.env.PM2_HOME = originalPM2Home;
            } else {
                delete process.env.PM2_HOME;
            }
        }, 15000); // 15 second timeout to allow for the Promise.race timeout
    });
});
@@ -0,0 +1,54 @@
import { mkdtempSync, rmSync, writeFileSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { join } from 'node:path';

import { afterAll, afterEach, beforeAll, describe, expect, it, vi } from 'vitest';

describe('isUnraidApiRunning (nodemon pid detection)', () => {
    let tempDir: string;
    let pidPath: string;

    beforeAll(() => {
        tempDir = mkdtempSync(join(tmpdir(), 'unraid-api-'));
        pidPath = join(tempDir, 'nodemon.pid');
    });

    afterAll(() => {
        rmSync(tempDir, { recursive: true, force: true });
    });

    afterEach(() => {
        vi.resetModules();
    });

    async function loadIsRunning() {
        vi.doMock('@app/environment.js', async () => {
            const actual =
                await vi.importActual<typeof import('@app/environment.js')>('@app/environment.js');
            return { ...actual, NODEMON_PID_PATH: pidPath };
        });

        const module = await import('@app/core/utils/process/unraid-api-running.js');
        return module.isUnraidApiRunning;
    }

    it('returns false when pid file is missing', async () => {
        const isUnraidApiRunning = await loadIsRunning();

        expect(await isUnraidApiRunning()).toBe(false);
    });

    it('returns true when a live pid is recorded', async () => {
        writeFileSync(pidPath, `${process.pid}`);
        const isUnraidApiRunning = await loadIsRunning();

        expect(await isUnraidApiRunning()).toBe(true);
    });

    it('returns false when pid file is invalid', async () => {
        writeFileSync(pidPath, 'not-a-number');
        const isUnraidApiRunning = await loadIsRunning();

        expect(await isUnraidApiRunning()).toBe(false);
    });
});

api/src/__test__/environment.nodemon-paths.test.ts (Normal file, 29 lines)

@@ -0,0 +1,29 @@
import { join } from 'node:path';

import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

describe('nodemon path configuration', () => {
    const originalUnraidApiCwd = process.env.UNRAID_API_CWD;

    beforeEach(() => {
        vi.resetModules();
        delete process.env.UNRAID_API_CWD;
    });

    afterEach(() => {
        if (originalUnraidApiCwd === undefined) {
            delete process.env.UNRAID_API_CWD;
        } else {
            process.env.UNRAID_API_CWD = originalUnraidApiCwd;
        }
    });

    it('anchors nodemon paths to the package root by default', async () => {
        const environment = await import('@app/environment.js');
        const { UNRAID_API_ROOT, NODEMON_CONFIG_PATH, NODEMON_PATH, UNRAID_API_CWD } = environment;

        expect(UNRAID_API_CWD).toBe(UNRAID_API_ROOT);
        expect(NODEMON_CONFIG_PATH).toBe(join(UNRAID_API_ROOT, 'nodemon.json'));
        expect(NODEMON_PATH).toBe(join(UNRAID_API_ROOT, 'node_modules', 'nodemon', 'bin', 'nodemon.js'));
    });
});
@@ -51,6 +51,8 @@ vi.mock('@app/store/index.js', () => ({
}));
vi.mock('@app/environment.js', () => ({
    ENVIRONMENT: 'development',
    SUPPRESS_LOGS: false,
    LOG_LEVEL: 'INFO',
    environment: {
        IS_MAIN_PROCESS: true,
    },
@@ -1,12 +1,25 @@
import '@app/dotenv.js';

import { Logger } from '@nestjs/common';
import { appendFileSync } from 'node:fs';

import { CommandFactory } from 'nest-commander';

import { LOG_LEVEL, SUPPRESS_LOGS } from '@app/environment.js';
import { LogService } from '@app/unraid-api/cli/log.service.js';

const BOOT_LOG_PATH = '/var/log/unraid-api/boot.log';

const logToBootFile = (message: string): void => {
    const timestamp = new Date().toISOString();
    const line = `[${timestamp}] [cli] ${message}\n`;
    try {
        appendFileSync(BOOT_LOG_PATH, line);
    } catch {
        // Silently fail if we can't write to boot log
    }
};

const getUnraidApiLocation = async () => {
    const { execa } = await import('execa');
    try {

@@ -26,6 +39,8 @@ const getLogger = () => {

const logger = getLogger();
try {
    logToBootFile(`CLI started with args: ${process.argv.slice(2).join(' ')}`);

    await import('json-bigint-patch');
    const { CliModule } = await import('@app/unraid-api/cli/cli.module.js');

@@ -38,10 +53,17 @@ try {
            nativeShell: { executablePath: await getUnraidApiLocation() },
        },
    });
    logToBootFile('CLI completed successfully');
    process.exit(0);
} catch (error) {
    // Always log errors to boot file for boot-time debugging
    const errorMessage = error instanceof Error ? error.stack || error.message : String(error);
    logToBootFile(`CLI ERROR: ${errorMessage}`);

    if (logger) {
        logger.error('ERROR:', error);
    } else {
        console.error('ERROR:', error);
    }
    process.exit(1);
}
api/src/connect-plugin-cleanup.ts (Normal file, 12 lines)

@@ -0,0 +1,12 @@
import { existsSync } from 'node:fs';

/**
 * Local filesystem and env checks stay synchronous so we can branch at module load.
 * @returns True if the Connect Unraid plugin is installed, false otherwise.
 */
export const isConnectPluginInstalled = () => {
    if (process.env.SKIP_CONNECT_PLUGIN_CHECK === 'true') {
        return true;
    }
    return existsSync('/boot/config/plugins/dynamix.unraid.net.plg');
};
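Because the check is synchronous, a consumer can branch at module load, as the doc comment suggests; a minimal sketch (the imported Connect module path is hypothetical):

```ts
import { isConnectPluginInstalled } from '@app/connect-plugin-cleanup.js';

// Hypothetical consumer: only pull in Connect-specific code when the plugin's
// boot file exists (or the check is explicitly skipped via env).
if (isConnectPluginInstalled()) {
    await import('@app/hypothetical/connect-feature.js'); // top-level await (ESM)
}
```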
@@ -1,7 +1,7 @@
import pino from 'pino';
import pretty from 'pino-pretty';

import { API_VERSION, LOG_LEVEL, LOG_TYPE, SUPPRESS_LOGS } from '@app/environment.js';
import { API_VERSION, LOG_LEVEL, LOG_TYPE, PATHS_LOGS_FILE, SUPPRESS_LOGS } from '@app/environment.js';

export const levels = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'] as const;

@@ -16,8 +16,10 @@ const nullDestination = pino.destination({
});

export const logDestination =
    process.env.SUPPRESS_LOGS === 'true' ? nullDestination : pino.destination();
// Since PM2 captures stdout and writes to the log file, we should not colorize stdout
    process.env.SUPPRESS_LOGS === 'true'
        ? nullDestination
        : pino.destination({ dest: PATHS_LOGS_FILE, mkdir: true });
// Since process output is piped directly to the log file, we should not colorize stdout
// to avoid ANSI escape codes in the log file
const stream = SUPPRESS_LOGS
    ? nullDestination

@@ -25,7 +27,7 @@ const stream = SUPPRESS_LOGS
    ? pretty({
          singleLine: true,
          hideObject: false,
          colorize: false, // No colors since PM2 writes stdout to file
          colorize: false, // No colors since logs are written directly to file
          colorizeObjects: false,
          levelFirst: false,
          ignore: 'hostname,pid',

@@ -7,8 +7,6 @@ import { PubSub } from 'graphql-subscriptions';
const eventEmitter = new EventEmitter();
eventEmitter.setMaxListeners(30);

export { GRAPHQL_PUBSUB_CHANNEL as PUBSUB_CHANNEL };

export const pubsub = new PubSub({ eventEmitter });

/**
api/src/core/utils/__test__/safe-mode.test.ts (Normal file, 66 lines)

@@ -0,0 +1,66 @@
import { afterEach, describe, expect, it, vi } from 'vitest';

import { isSafeModeEnabled } from '@app/core/utils/safe-mode.js';
import { store } from '@app/store/index.js';
import * as stateFileLoader from '@app/store/services/state-file-loader.js';

describe('isSafeModeEnabled', () => {
    afterEach(() => {
        vi.restoreAllMocks();
    });

    it('returns the safe mode flag already present in the store', () => {
        const baseState = store.getState();
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            emhttp: {
                ...baseState.emhttp,
                var: {
                    ...(baseState.emhttp?.var ?? {}),
                    safeMode: true,
                },
            },
        });
        const loaderSpy = vi.spyOn(stateFileLoader, 'loadStateFileSync');

        expect(isSafeModeEnabled()).toBe(true);
        expect(loaderSpy).not.toHaveBeenCalled();
    });

    it('falls back to the synchronous loader when store state is missing', () => {
        const baseState = store.getState();
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            emhttp: {
                ...baseState.emhttp,
                var: {
                    ...(baseState.emhttp?.var ?? {}),
                    safeMode: undefined as unknown as boolean,
                } as typeof baseState.emhttp.var,
            } as typeof baseState.emhttp,
        } as typeof baseState);
        vi.spyOn(stateFileLoader, 'loadStateFileSync').mockReturnValue({
            ...(baseState.emhttp?.var ?? {}),
            safeMode: true,
        } as any);

        expect(isSafeModeEnabled()).toBe(true);
    });

    it('defaults to false when loader cannot provide state', () => {
        const baseState = store.getState();
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            emhttp: {
                ...baseState.emhttp,
                var: {
                    ...(baseState.emhttp?.var ?? {}),
                    safeMode: undefined as unknown as boolean,
                } as typeof baseState.emhttp.var,
            } as typeof baseState.emhttp,
        } as typeof baseState);
        vi.spyOn(stateFileLoader, 'loadStateFileSync').mockReturnValue(null);

        expect(isSafeModeEnabled()).toBe(false);
    });
});
@@ -1,40 +0,0 @@
export const isUnraidApiRunning = async (): Promise<boolean | undefined> => {
    const { PM2_HOME } = await import('@app/environment.js');

    // Set PM2_HOME if not already set
    if (!process.env.PM2_HOME) {
        process.env.PM2_HOME = PM2_HOME;
    }

    const pm2Module = await import('pm2');
    const pm2 = pm2Module.default || pm2Module;

    const pm2Promise = new Promise<boolean>((resolve) => {
        pm2.connect(function (err) {
            if (err) {
                // Don't reject here, resolve with false since we can't connect to PM2
                resolve(false);
                return;
            }

            // Now try to describe unraid-api specifically
            pm2.describe('unraid-api', function (err, processDescription) {
                if (err || processDescription.length === 0) {
                    // Service not found or error occurred
                    resolve(false);
                } else {
                    const isOnline = processDescription?.[0]?.pm2_env?.status === 'online';
                    resolve(isOnline);
                }

                pm2.disconnect();
            });
        });
    });

    const timeoutPromise = new Promise<boolean>((resolve) => {
        setTimeout(() => resolve(false), 10000); // 10 second timeout
    });

    return Promise.race([pm2Promise, timeoutPromise]);
};
api/src/core/utils/process/unraid-api-running.ts (Normal file, 23 lines)

@@ -0,0 +1,23 @@
import { readFile } from 'node:fs/promises';

import { fileExists } from '@app/core/utils/files/file-exists.js';
import { NODEMON_PID_PATH } from '@app/environment.js';

export const isUnraidApiRunning = async (): Promise<boolean> => {
    try {
        if (!(await fileExists(NODEMON_PID_PATH))) {
            return false;
        }

        const pidText = (await readFile(NODEMON_PID_PATH, 'utf-8')).trim();
        const pid = Number.parseInt(pidText, 10);
        if (Number.isNaN(pid)) {
            return false;
        }

        process.kill(pid, 0);
        return true;
    } catch {
        return false;
    }
};
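A minimal usage sketch; note that `process.kill(pid, 0)` in the implementation above sends no signal and only probes whether the recorded nodemon pid is alive (it throws for dead pids, which the catch turns into `false`):

```ts
import { isUnraidApiRunning } from '@app/core/utils/process/unraid-api-running.js';

// Hypothetical status probe, e.g. from a CLI status command.
const running = await isUnraidApiRunning();
console.log(running ? 'unraid-api is running' : 'unraid-api is stopped');
```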
api/src/core/utils/safe-mode.ts (Normal file, 17 lines)

@@ -0,0 +1,17 @@
import { store } from '@app/store/index.js';
import { loadStateFileSync } from '@app/store/services/state-file-loader.js';
import { StateFileKey } from '@app/store/types.js';

export const isSafeModeEnabled = (): boolean => {
    const safeModeFromStore = store.getState().emhttp?.var?.safeMode;
    if (typeof safeModeFromStore === 'boolean') {
        return safeModeFromStore;
    }

    const varState = loadStateFileSync(StateFileKey.var);
    if (varState) {
        return Boolean(varState.safeMode);
    }

    return false;
};
@@ -2,7 +2,7 @@
// Non-function exports from this module are loaded into the NestJS Config at runtime.

import { readFileSync } from 'node:fs';
import { join } from 'node:path';
import { dirname, join } from 'node:path';
import { fileURLToPath } from 'node:url';

import type { PackageJson, SetRequired } from 'type-fest';

@@ -65,6 +65,7 @@ export const getPackageJsonDependencies = (): string[] | undefined => {
};

export const API_VERSION = process.env.npm_package_version ?? getPackageJson().version;
export const UNRAID_API_ROOT = dirname(getPackageJsonPath());

/** Controls how the app is built/run (i.e. in terms of optimization) */
export const NODE_ENV =

@@ -91,6 +92,7 @@ export const LOG_LEVEL = process.env.LOG_LEVEL
    : process.env.ENVIRONMENT === 'production'
      ? 'INFO'
      : 'DEBUG';
export const LOG_CASBIN = process.env.LOG_CASBIN === 'true';
export const SUPPRESS_LOGS = process.env.SUPPRESS_LOGS === 'true';
export const MOTHERSHIP_GRAPHQL_LINK = process.env.MOTHERSHIP_GRAPHQL_LINK
    ? process.env.MOTHERSHIP_GRAPHQL_LINK

@@ -98,12 +100,18 @@ export const MOTHERSHIP_GRAPHQL_LINK = process.env.MOTHERSHIP_GRAPHQL_LINK
      ? 'https://staging.mothership.unraid.net/ws'
      : 'https://mothership.unraid.net/ws';

export const PM2_HOME = process.env.PM2_HOME ?? '/var/log/.pm2';
export const PM2_PATH = join(import.meta.dirname, '../../', 'node_modules', 'pm2', 'bin', 'pm2');
export const ECOSYSTEM_PATH = join(import.meta.dirname, '../../', 'ecosystem.config.json');
export const PATHS_LOGS_DIR =
    process.env.PATHS_LOGS_DIR ?? process.env.LOGS_DIR ?? '/var/log/unraid-api';
export const PATHS_LOGS_FILE = process.env.PATHS_LOGS_FILE ?? '/var/log/graphql-api.log';
export const PATHS_NODEMON_LOG_FILE =
    process.env.PATHS_NODEMON_LOG_FILE ?? join(PATHS_LOGS_DIR, 'nodemon.log');

export const NODEMON_PATH = join(UNRAID_API_ROOT, 'node_modules', 'nodemon', 'bin', 'nodemon.js');
export const NODEMON_CONFIG_PATH = join(UNRAID_API_ROOT, 'nodemon.json');
export const NODEMON_PID_PATH = process.env.NODEMON_PID_PATH ?? '/var/run/unraid-api/nodemon.pid';
export const NODEMON_LOCK_PATH = process.env.NODEMON_LOCK_PATH ?? '/var/run/unraid-api/nodemon.lock';
export const UNRAID_API_CWD = process.env.UNRAID_API_CWD ?? UNRAID_API_ROOT;
export const UNRAID_API_SERVER_ENTRYPOINT = join(UNRAID_API_CWD, 'dist', 'main.js');

export const PATHS_CONFIG_MODULES =
    process.env.PATHS_CONFIG_MODULES ?? '/boot/config/plugins/dynamix.my.servers/configs';
api/src/i18n/*.json (Normal files, 1 line each): ar, bn, ca, cs, da, de, en, es, fr, hi, hr, hu, it, ja, ko, lv, nl, no, pl, pt, ro, ru, sv, uk, zh

Each locale file is created with the same single-line content:

@@ -0,0 +1 @@
{}
@@ -4,7 +4,7 @@ import '@app/dotenv.js';

import { type NestFastifyApplication } from '@nestjs/platform-fastify';
import { unlinkSync } from 'fs';
import { mkdir } from 'fs/promises';
import { mkdir, readFile } from 'fs/promises';
import http from 'http';
import https from 'https';

@@ -18,13 +18,11 @@ import { fileExistsSync } from '@app/core/utils/files/file-exists.js';
import { getServerIdentifier } from '@app/core/utils/server-identifier.js';
import { environment, PATHS_CONFIG_MODULES, PORT } from '@app/environment.js';
import * as envVars from '@app/environment.js';
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
import { shutdownApiEvent } from '@app/store/actions/shutdown-api-event.js';
import { store } from '@app/store/index.js';
import { loadDynamixConfig, store } from '@app/store/index.js';
import { startMiddlewareListeners } from '@app/store/listeners/listener-middleware.js';
import { loadStateFiles } from '@app/store/modules/emhttp.js';
import { loadRegistrationKey } from '@app/store/modules/registration.js';
import { setupDynamixConfigWatch } from '@app/store/watch/dynamix-config-watch.js';
import { setupRegistrationKeyWatch } from '@app/store/watch/registration-watch.js';
import { StateManager } from '@app/store/watch/state-watch.js';

@@ -76,7 +74,7 @@ export const viteNodeApp = async () => {
    await store.dispatch(loadRegistrationKey());

    // Load my dynamix config file into store
    await store.dispatch(loadDynamixConfigFile());
    loadDynamixConfig();

    // Start listening to file updates
    StateManager.getInstance();

@@ -84,9 +82,6 @@ export const viteNodeApp = async () => {
    // Start listening to key file changes
    setupRegistrationKeyWatch();

    // Start listening to dynamix config file changes
    setupDynamixConfigWatch();

    // If port is unix socket, delete old socket before starting http server
    unlinkUnixPort();
@@ -1,12 +1,9 @@
import { F_OK } from 'constants';
import { access } from 'fs/promises';

import { createAsyncThunk } from '@reduxjs/toolkit';
import { createTtlMemoizedLoader } from '@unraid/shared';

import type { RecursivePartial } from '@app/types/index.js';
import { type DynamixConfig } from '@app/core/types/ini.js';
import { fileExistsSync } from '@app/core/utils/files/file-exists.js';
import { parseConfig } from '@app/core/utils/misc/parse-config.js';
import { type RecursiveNullable, type RecursivePartial } from '@app/types/index.js';
import { batchProcess } from '@app/utils.js';

/**
 * Loads a configuration file from disk, parses it to a RecursivePartial of the provided type, and returns it.

@@ -16,11 +13,8 @@ import { batchProcess } from '@app/utils.js';
 * @param path The path to the configuration file on disk.
 * @returns A parsed RecursivePartial of the provided type.
 */
async function loadConfigFile<ConfigType>(path: string): Promise<RecursivePartial<ConfigType>> {
    const fileIsAccessible = await access(path, F_OK)
        .then(() => true)
        .catch(() => false);
    return fileIsAccessible
function loadConfigFileSync<ConfigType>(path: string): RecursivePartial<ConfigType> {
    return fileExistsSync(path)
        ? parseConfig<RecursivePartial<ConfigType>>({
              filePath: path,
              type: 'ini',

@@ -28,21 +22,40 @@ async function loadConfigFile<ConfigType>(path: string): Promise<RecursivePartia
        : {};
}

/**
 * Load the dynamix.cfg into the store.
 *
 * Note: If the file doesn't exist this will fallback to default values.
 */
export const loadDynamixConfigFile = createAsyncThunk<
    RecursiveNullable<RecursivePartial<DynamixConfig>>,
    string | undefined
>('config/load-dynamix-config-file', async (filePath) => {
    if (filePath) {
        return loadConfigFile<DynamixConfig>(filePath);
    }
    const store = await import('@app/store/index.js');
    const paths = store.getters.paths()['dynamix-config'];
    const { data: configs } = await batchProcess(paths, (path) => loadConfigFile<DynamixConfig>(path));
    const [defaultConfig = {}, customConfig = {}] = configs;
    return { ...defaultConfig, ...customConfig };
type ConfigPaths = readonly (string | undefined | null)[];
const CACHE_WINDOW_MS = 250;

const memoizedConfigLoader = createTtlMemoizedLoader<
    RecursivePartial<DynamixConfig>,
    ConfigPaths,
    string
>({
    ttlMs: CACHE_WINDOW_MS,
    getCacheKey: (configPaths: ConfigPaths): string => JSON.stringify(configPaths),
    load: (configPaths: ConfigPaths) => {
        const validPaths = configPaths.filter((path): path is string => Boolean(path));
        if (validPaths.length === 0) {
            return {};
        }
        const configFiles = validPaths.map((path) => loadConfigFileSync<DynamixConfig>(path));
        return configFiles.reduce<RecursivePartial<DynamixConfig>>(
            (accumulator, configFile) => ({
                ...accumulator,
                ...configFile,
            }),
            {}
        );
    },
});

/**
 * Loads dynamix config from disk with TTL caching.
 *
 * @param configPaths - Array of config file paths to load and merge
 * @returns Merged config object from all valid paths
 */
export const loadDynamixConfigFromDiskSync = (
    configPaths: readonly (string | undefined | null)[]
): RecursivePartial<DynamixConfig> => {
    return memoizedConfigLoader.get(configPaths);
};
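A behavioral sketch of the TTL cache configured above; the config paths are hypothetical, and whether repeated calls return the same object reference depends on `createTtlMemoizedLoader`'s internals:

```ts
import { loadDynamixConfigFromDiskSync } from '@app/store/actions/load-dynamix-config-file.js';

// Hypothetical paths: later entries win when the parsed configs are merged.
const paths = ['/example/default.cfg', '/example/dynamix.cfg'];

const first = loadDynamixConfigFromDiskSync(paths);
// Within the 250 ms window this call should be served from the cache
// (same cache key: JSON.stringify(paths)) rather than re-reading disk;
// after the TTL expires, the files are re-read and re-merged.
const second = loadDynamixConfigFromDiskSync(paths);
```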
@@ -1,7 +1,11 @@
import { configureStore } from '@reduxjs/toolkit';

import { logger } from '@app/core/log.js';
import { loadDynamixConfigFromDiskSync } from '@app/store/actions/load-dynamix-config-file.js';
import { listenerMiddleware } from '@app/store/listeners/listener-middleware.js';
import { updateDynamixConfig } from '@app/store/modules/dynamix.js';
import { rootReducer } from '@app/store/root-reducer.js';
import { FileLoadStatus } from '@app/store/types.js';

export const store = configureStore({
    reducer: rootReducer,

@@ -15,8 +19,36 @@ export type RootState = ReturnType<typeof store.getState>;
export type AppDispatch = typeof store.dispatch;
export type ApiStore = typeof store;

// loadDynamixConfig is located here and not in the actions/load-dynamix-config-file.js file because it needs to access the store,
// and injecting it seemed circular and convoluted for this use case.
/**
 * Loads the dynamix config into the store.
 * Can be called multiple times - uses TTL caching internally.
 * @returns The loaded dynamix config.
 */
export const loadDynamixConfig = () => {
    const configPaths = store.getState().paths['dynamix-config'] ?? [];
    try {
        const config = loadDynamixConfigFromDiskSync(configPaths);
        store.dispatch(
            updateDynamixConfig({
                ...config,
                status: FileLoadStatus.LOADED,
            })
        );
    } catch (error) {
        logger.error(error, 'Failed to load dynamix config from disk');
        store.dispatch(
            updateDynamixConfig({
                status: FileLoadStatus.FAILED_LOADING,
            })
        );
    }
    return store.getState().dynamix;
};

export const getters = {
    dynamix: () => store.getState().dynamix,
    dynamix: () => loadDynamixConfig(),
    emhttp: () => store.getState().emhttp,
    paths: () => store.getState().paths,
    registration: () => store.getState().registration,
@@ -1,9 +1,10 @@
import { isAnyOf } from '@reduxjs/toolkit';
import { GRAPHQL_PUBSUB_CHANNEL } from '@unraid/shared/pubsub/graphql.pubsub.js';
import { isEqual } from 'lodash-es';

import { logger } from '@app/core/log.js';
import { getArrayData } from '@app/core/modules/array/get-array-data.js';
import { pubsub, PUBSUB_CHANNEL } from '@app/core/pubsub.js';
import { pubsub } from '@app/core/pubsub.js';
import { startAppListening } from '@app/store/listeners/listener-middleware.js';
import { loadSingleStateFile } from '@app/store/modules/emhttp.js';
import { StateFileKey } from '@app/store/types.js';

@@ -20,14 +21,14 @@ export const enableArrayEventListener = () =>
        await delay(5_000);
        const array = getArrayData(getState);
        if (!isEqual(oldArrayData, array)) {
            pubsub.publish(PUBSUB_CHANNEL.ARRAY, { array });
            pubsub.publish(GRAPHQL_PUBSUB_CHANNEL.ARRAY, { array });
            logger.debug({ event: array }, 'Array was updated, publishing event');
        }

        subscribe();
    } else if (action.meta.arg === StateFileKey.var) {
        if (!isEqual(getOriginalState().emhttp.var?.name, getState().emhttp.var?.name)) {
            await pubsub.publish(PUBSUB_CHANNEL.INFO, {
            await pubsub.publish(GRAPHQL_PUBSUB_CHANNEL.INFO, {
                info: {
                    os: {
                        hostname: getState().emhttp.var?.name,

@@ -2,7 +2,6 @@ import type { PayloadAction } from '@reduxjs/toolkit';
import { createSlice } from '@reduxjs/toolkit';

import { type DynamixConfig } from '@app/core/types/ini.js';
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
import { FileLoadStatus } from '@app/store/types.js';
import { RecursivePartial } from '@app/types/index.js';

@@ -22,24 +21,6 @@ export const dynamix = createSlice({
            return Object.assign(state, action.payload);
        },
    },
    extraReducers(builder) {
        builder.addCase(loadDynamixConfigFile.pending, (state) => {
            state.status = FileLoadStatus.LOADING;
        });

        builder.addCase(loadDynamixConfigFile.fulfilled, (state, action) => {
            return {
                ...(action.payload as DynamixConfig),
                status: FileLoadStatus.LOADED,
            };
        });

        builder.addCase(loadDynamixConfigFile.rejected, (state, action) => {
            Object.assign(state, action.payload, {
                status: FileLoadStatus.FAILED_LOADING,
            });
        });
    },
});

export const { updateDynamixConfig } = dynamix.actions;
@@ -163,6 +163,18 @@ export const loadStateFiles = createAsyncThunk<
    return state;
});

const stateFieldKeyMap: Record<StateFileKey, keyof SliceState> = {
    [StateFileKey.var]: 'var',
    [StateFileKey.devs]: 'devices',
    [StateFileKey.network]: 'networks',
    [StateFileKey.nginx]: 'nginx',
    [StateFileKey.shares]: 'shares',
    [StateFileKey.disks]: 'disks',
    [StateFileKey.users]: 'users',
    [StateFileKey.sec]: 'smbShares',
    [StateFileKey.sec_nfs]: 'nfsShares',
};

export const emhttp = createSlice({
    name: 'emhttp',
    initialState,

@@ -175,7 +187,8 @@ export const emhttp = createSlice({
            }>
        ) {
            const { field } = action.payload;
            return Object.assign(state, { [field]: action.payload.state });
            const targetField = stateFieldKeyMap[field] ?? (field as keyof SliceState);
            return Object.assign(state, { [targetField]: action.payload.state });
        },
    },
    extraReducers(builder) {
api/src/store/services/__test__/state-file-loader.test.ts (Normal file, 81 lines)

@@ -0,0 +1,81 @@
import { mkdtempSync, readFileSync, rmSync, writeFileSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { join } from 'node:path';

import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { store } from '@app/store/index.js';
import { loadStateFileSync } from '@app/store/services/state-file-loader.js';
import { StateFileKey } from '@app/store/types.js';

const VAR_FIXTURE = readFileSync(new URL('../../../../dev/states/var.ini', import.meta.url), 'utf-8');

const writeVarFixture = (dir: string, safeMode: 'yes' | 'no') => {
    const content = VAR_FIXTURE.replace(/safeMode="(yes|no)"/, `safeMode="${safeMode}"`);
    writeFileSync(join(dir, `${StateFileKey.var}.ini`), content);
};

describe('loadStateFileSync', () => {
    let tempDir: string;
    let baseState: ReturnType<typeof store.getState>;

    beforeEach(() => {
        tempDir = mkdtempSync(join(tmpdir(), 'state-file-'));
        baseState = store.getState();
    });

    afterEach(() => {
        vi.restoreAllMocks();
        rmSync(tempDir, { recursive: true, force: true });
    });

    it('loads var.ini, updates the store, and returns the parsed state', () => {
        writeVarFixture(tempDir, 'yes');
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            paths: {
                ...baseState.paths,
                states: tempDir,
            },
        });
        const dispatchSpy = vi.spyOn(store, 'dispatch').mockImplementation((action) => action as any);

        const result = loadStateFileSync(StateFileKey.var);

        expect(result?.safeMode).toBe(true);
        expect(dispatchSpy).toHaveBeenCalledWith(
            expect.objectContaining({
                type: 'emhttp/updateEmhttpState',
                payload: {
                    field: StateFileKey.var,
                    state: expect.objectContaining({ safeMode: true }),
                },
            })
        );
    });

    it('returns null when the states path is missing', () => {
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            paths: undefined,
        } as any);
        const dispatchSpy = vi.spyOn(store, 'dispatch');

        expect(loadStateFileSync(StateFileKey.var)).toBeNull();
        expect(dispatchSpy).not.toHaveBeenCalled();
    });

    it('returns null when the requested state file cannot be found', () => {
        vi.spyOn(store, 'getState').mockReturnValue({
            ...baseState,
            paths: {
                ...baseState.paths,
                states: tempDir,
            },
        });
        const dispatchSpy = vi.spyOn(store, 'dispatch');

        expect(loadStateFileSync(StateFileKey.var)).toBeNull();
        expect(dispatchSpy).not.toHaveBeenCalled();
    });
});
api/src/store/services/state-file-loader.ts (Normal file, 81 lines)

@@ -0,0 +1,81 @@
import { join } from 'node:path';

import type { SliceState } from '@app/store/modules/emhttp.js';
import type { StateFileToIniParserMap } from '@app/store/types.js';
import { parseConfig } from '@app/core/utils/misc/parse-config.js';
import { store } from '@app/store/index.js';
import { updateEmhttpState } from '@app/store/modules/emhttp.js';
import { parse as parseDevices } from '@app/store/state-parsers/devices.js';
import { parse as parseNetwork } from '@app/store/state-parsers/network.js';
import { parse as parseNfs } from '@app/store/state-parsers/nfs.js';
import { parse as parseNginx } from '@app/store/state-parsers/nginx.js';
import { parse as parseShares } from '@app/store/state-parsers/shares.js';
import { parse as parseSlots } from '@app/store/state-parsers/slots.js';
import { parse as parseSmb } from '@app/store/state-parsers/smb.js';
import { parse as parseUsers } from '@app/store/state-parsers/users.js';
import { parse as parseVar } from '@app/store/state-parsers/var.js';
import { StateFileKey } from '@app/store/types.js';

type ParserReturnMap = {
    [StateFileKey.var]: ReturnType<typeof parseVar>;
    [StateFileKey.devs]: ReturnType<typeof parseDevices>;
    [StateFileKey.network]: ReturnType<typeof parseNetwork>;
    [StateFileKey.nginx]: ReturnType<typeof parseNginx>;
    [StateFileKey.shares]: ReturnType<typeof parseShares>;
    [StateFileKey.disks]: ReturnType<typeof parseSlots>;
    [StateFileKey.users]: ReturnType<typeof parseUsers>;
    [StateFileKey.sec]: ReturnType<typeof parseSmb>;
    [StateFileKey.sec_nfs]: ReturnType<typeof parseNfs>;
};

const PARSER_MAP: { [K in StateFileKey]: StateFileToIniParserMap[K] } = {
    [StateFileKey.var]: parseVar,
    [StateFileKey.devs]: parseDevices,
    [StateFileKey.network]: parseNetwork,
    [StateFileKey.nginx]: parseNginx,
    [StateFileKey.shares]: parseShares,
    [StateFileKey.disks]: parseSlots,
    [StateFileKey.users]: parseUsers,
    [StateFileKey.sec]: parseSmb,
    [StateFileKey.sec_nfs]: parseNfs,
};

/**
 * Synchronously loads an emhttp state file, updates the Redux store slice, and returns the parsed state.
 *
 * Designed for bootstrap contexts (CLI, plugin loading, etc.) where dispatching the async thunks is
 * impractical but we still need authoritative emhttp state from disk.
 */
export const loadStateFileSync = <K extends StateFileKey>(
    stateFileKey: K
): ParserReturnMap[K] | null => {
    const state = store.getState();
    const statesDirectory = state.paths?.states;

    if (!statesDirectory) {
        return null;
    }

    const filePath = join(statesDirectory, `${stateFileKey}.ini`);

    try {
        const parser = PARSER_MAP[stateFileKey] as StateFileToIniParserMap[K];
        const rawConfig = parseConfig<Record<string, unknown>>({
            filePath,
            type: 'ini',
        });
        const config = rawConfig as Parameters<StateFileToIniParserMap[K]>[0];
        const parsed = (parser as (input: any) => ParserReturnMap[K])(config);

        store.dispatch(
            updateEmhttpState({
                field: stateFileKey,
                state: parsed as Partial<SliceState[keyof SliceState]>,
            })
        );

        return parsed;
    } catch (error) {
        return null;
    }
};
@@ -1,17 +0,0 @@
import { watch } from 'chokidar';

import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
import { getters, store } from '@app/store/index.js';

export const setupDynamixConfigWatch = () => {
    const configPath = getters.paths()?.['dynamix-config'];

    // Update store when cfg changes
    watch(configPath, {
        persistent: true,
        ignoreInitial: true,
    }).on('change', async () => {
        // Load updated dynamix config file into store
        await store.dispatch(loadDynamixConfigFile());
    });
};
api/src/types/jsonforms-i18n.d.ts (Normal file, vendored, 40 lines)

@@ -0,0 +1,40 @@
import '@jsonforms/core/lib/models/jsonSchema4';
import '@jsonforms/core/lib/models/jsonSchema7';
import '@jsonforms/core/src/models/jsonSchema4';
import '@jsonforms/core/src/models/jsonSchema7';

declare module '@jsonforms/core/lib/models/jsonSchema4' {
    interface JsonSchema4 {
        i18n?: string;
    }
}

declare module '@jsonforms/core/lib/models/jsonSchema7' {
    interface JsonSchema7 {
        i18n?: string;
    }
}

declare module '@jsonforms/core/src/models/jsonSchema4' {
    interface JsonSchema4 {
        i18n?: string;
    }
}

declare module '@jsonforms/core/src/models/jsonSchema7' {
    interface JsonSchema7 {
        i18n?: string;
    }
}

declare module '@jsonforms/core/lib/models/jsonSchema4.js' {
    interface JsonSchema4 {
        i18n?: string;
    }
}

declare module '@jsonforms/core/lib/models/jsonSchema7.js' {
    interface JsonSchema7 {
        i18n?: string;
    }
}
@@ -6,8 +6,7 @@ import { AuthZGuard } from 'nest-authz';
import request from 'supertest';
import { afterAll, beforeAll, describe, expect, it, vi } from 'vitest';

import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
import { store } from '@app/store/index.js';
import { loadDynamixConfig, store } from '@app/store/index.js';
import { loadStateFiles } from '@app/store/modules/emhttp.js';
import { AppModule } from '@app/unraid-api/app/app.module.js';
import { AuthService } from '@app/unraid-api/auth/auth.service.js';
@@ -111,8 +110,8 @@ describe('AppModule Integration Tests', () => {

    beforeAll(async () => {
        // Initialize the dynamix config and state files before creating the module
        await store.dispatch(loadDynamixConfigFile());
        await store.dispatch(loadStateFiles());
        loadDynamixConfig();

        // Debug: Log the CSRF token from the store
        const { getters } = await import('@app/store/index.js');

@@ -1,4 +1,5 @@
import { CacheModule } from '@nestjs/cache-manager';
import { ConfigModule } from '@nestjs/config';
import { Test } from '@nestjs/testing';

import { describe, expect, it } from 'vitest';
@@ -10,7 +11,11 @@ describe('Module Dependencies Integration', () => {
        let module;
        try {
            module = await Test.createTestingModule({
                imports: [CacheModule.register({ isGlobal: true }), RestModule],
                imports: [
                    ConfigModule.forRoot({ ignoreEnvFile: true, isGlobal: true }),
                    CacheModule.register({ isGlobal: true }),
                    RestModule,
                ],
            }).compile();

            expect(module).toBeDefined();

@@ -8,6 +8,7 @@ import { AuthService } from '@app/unraid-api/auth/auth.service.js';
import { CasbinModule } from '@app/unraid-api/auth/casbin/casbin.module.js';
import { CasbinService } from '@app/unraid-api/auth/casbin/casbin.service.js';
import { BASE_POLICY, CASBIN_MODEL } from '@app/unraid-api/auth/casbin/index.js';
import { resolveSubjectFromUser } from '@app/unraid-api/auth/casbin/resolve-subject.util.js';
import { CookieService, SESSION_COOKIE_CONFIG } from '@app/unraid-api/auth/cookie.service.js';
import { UserCookieStrategy } from '@app/unraid-api/auth/cookie.strategy.js';
import { ServerHeaderStrategy } from '@app/unraid-api/auth/header.strategy.js';
@@ -41,13 +42,7 @@ import { getRequest } from '@app/utils.js';

    try {
        const request = getRequest(ctx);
        const roles = request?.user?.roles || [];

        if (!Array.isArray(roles)) {
            throw new UnauthorizedException('User roles must be an array');
        }

        return roles.join(',');
        return resolveSubjectFromUser(request?.user);
    } catch (error) {
        logger.error('Failed to extract user context', error);
        throw new UnauthorizedException('Failed to authenticate user');

133
api/src/unraid-api/auth/casbin/authz.guard.integration.spec.ts
Normal file
@@ -0,0 +1,133 @@
import { ExecutionContext, Type } from '@nestjs/common';
import { Reflector } from '@nestjs/core';
import { ExecutionContextHost } from '@nestjs/core/helpers/execution-context-host.js';

import type { Enforcer } from 'casbin';
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
import { AuthZGuard, BatchApproval } from 'nest-authz';
import { beforeAll, describe, expect, it } from 'vitest';

import { CasbinService } from '@app/unraid-api/auth/casbin/casbin.service.js';
import { CASBIN_MODEL } from '@app/unraid-api/auth/casbin/model.js';
import { BASE_POLICY } from '@app/unraid-api/auth/casbin/policy.js';
import { resolveSubjectFromUser } from '@app/unraid-api/auth/casbin/resolve-subject.util.js';
import { DockerMutationsResolver } from '@app/unraid-api/graph/resolvers/docker/docker.mutations.resolver.js';
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
import { VmMutationsResolver } from '@app/unraid-api/graph/resolvers/vms/vms.mutations.resolver.js';
import { MeResolver } from '@app/unraid-api/graph/user/user.resolver.js';
import { getRequest } from '@app/utils.js';

type Handler = (...args: any[]) => unknown;

type TestUser = {
    id?: string;
    roles?: Role[];
};

type TestRequest = {
    user?: TestUser;
};

function createExecutionContext(
    handler: Handler,
    classRef: Type<unknown> | null,
    roles: Role[],
    userId = 'api-key-viewer'
): ExecutionContext {
    const request: TestRequest = {
        user: {
            id: userId,
            roles: [...roles],
        },
    };

    const graphqlContextHost = new ExecutionContextHost(
        [undefined, undefined, { req: request }, undefined],
        classRef,
        handler
    );

    graphqlContextHost.setType('graphql');

    return graphqlContextHost as unknown as ExecutionContext;
}

describe('AuthZGuard + Casbin policies', () => {
    let guard: AuthZGuard;
    let enforcer: Enforcer;

    beforeAll(async () => {
        const casbinService = new CasbinService();
        enforcer = await casbinService.initializeEnforcer(CASBIN_MODEL, BASE_POLICY);

        await enforcer.addGroupingPolicy('api-key-viewer', Role.VIEWER);
        await enforcer.addGroupingPolicy('api-key-admin', Role.ADMIN);

        guard = new AuthZGuard(new Reflector(), enforcer, {
            enablePossession: false,
            batchApproval: BatchApproval.ALL,
            userFromContext: (ctx: ExecutionContext) => {
                const request = getRequest(ctx) as TestRequest | undefined;

                return resolveSubjectFromUser(request?.user);
            },
        });
    });

    it('denies viewer role from stopping docker containers', async () => {
        const context = createExecutionContext(
            DockerMutationsResolver.prototype.stop,
            DockerMutationsResolver,
            [Role.VIEWER],
            'api-key-viewer'
        );

        await expect(guard.canActivate(context)).resolves.toBe(false);
    });

    it('allows admin role to stop docker containers', async () => {
        const context = createExecutionContext(
            DockerMutationsResolver.prototype.stop,
            DockerMutationsResolver,
            [Role.ADMIN],
            'api-key-admin'
        );

        await expect(guard.canActivate(context)).resolves.toBe(true);
    });

    it('denies viewer role from stopping virtual machines', async () => {
        const context = createExecutionContext(
            VmMutationsResolver.prototype.stop,
            VmMutationsResolver,
            [Role.VIEWER],
            'api-key-viewer'
        );

        await expect(guard.canActivate(context)).resolves.toBe(false);
    });

    it('allows viewer role to read docker data', async () => {
        const context = createExecutionContext(
            DockerResolver.prototype.containers,
            DockerResolver,
            [Role.VIEWER],
            'api-key-viewer'
        );

        await expect(guard.canActivate(context)).resolves.toBe(true);
    });

    it('allows API key with explicit permission to access ME resource', async () => {
        await enforcer.addPolicy('api-key-custom', Resource.ME, AuthAction.READ_ANY);

        const context = createExecutionContext(
            MeResolver.prototype.me,
            MeResolver,
            [],
            'api-key-custom'
        );

        await expect(guard.canActivate(context)).resolves.toBe(true);
    });
});
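For what it's worth, the enforcer built in the spec above can also be exercised directly when debugging policy changes. A hedged sketch reusing only identifiers that appear in the test; the (subject, resource, action) request shape is inferred from those calls, not confirmed by this diff:

// Direct policy check against the enforcer from beforeAll above.
await enforcer.addPolicy('api-key-custom', Resource.ME, AuthAction.READ_ANY);
const allowed = await enforcer.enforce('api-key-custom', Resource.ME, AuthAction.READ_ANY);
// allowed === true once the explicit permission has been added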
@@ -2,7 +2,7 @@ import { Injectable, InternalServerErrorException, Logger, OnModuleInit } from '

import { Model as CasbinModel, Enforcer, newEnforcer, StringAdapter } from 'casbin';

import { LOG_LEVEL } from '@app/environment.js';
import { LOG_CASBIN, LOG_LEVEL } from '@app/environment.js';

@Injectable()
export class CasbinService {
@@ -20,9 +20,8 @@ export class CasbinService {
        const casbinPolicy = new StringAdapter(policy);
        try {
            const enforcer = await newEnforcer(casbinModel, casbinPolicy);
            if (LOG_LEVEL === 'TRACE') {
                enforcer.enableLog(true);
            }
            // Casbin request logging is extremely verbose; keep it off unless explicitly enabled.
            enforcer.enableLog(LOG_CASBIN && LOG_LEVEL === 'TRACE');

            return enforcer;
        } catch (error: unknown) {

43
api/src/unraid-api/auth/casbin/resolve-subject.util.spec.ts
Normal file
@@ -0,0 +1,43 @@
import { UnauthorizedException } from '@nestjs/common';

import { describe, expect, it } from 'vitest';

import { resolveSubjectFromUser } from '@app/unraid-api/auth/casbin/resolve-subject.util.js';

describe('resolveSubjectFromUser', () => {
    it('returns trimmed user id when available', () => {
        const subject = resolveSubjectFromUser({ id: ' user-123 ', roles: ['viewer'] });

        expect(subject).toBe('user-123');
    });

    it('falls back to a single non-empty role', () => {
        const subject = resolveSubjectFromUser({ roles: [' viewer '] });

        expect(subject).toBe('viewer');
    });

    it('throws when role list is empty', () => {
        expect(() => resolveSubjectFromUser({ roles: [] })).toThrow(UnauthorizedException);
    });

    it('throws when multiple roles are present', () => {
        expect(() => resolveSubjectFromUser({ roles: ['viewer', 'admin'] })).toThrow(
            UnauthorizedException
        );
    });

    it('throws when roles is not an array', () => {
        expect(() => resolveSubjectFromUser({ roles: 'viewer' as unknown })).toThrow(
            UnauthorizedException
        );
    });

    it('throws when role subject is blank', () => {
        expect(() => resolveSubjectFromUser({ roles: [' '] })).toThrow(UnauthorizedException);
    });

    it('throws when user is missing', () => {
        expect(() => resolveSubjectFromUser(undefined)).toThrow(UnauthorizedException);
    });
});
46
api/src/unraid-api/auth/casbin/resolve-subject.util.ts
Normal file
@@ -0,0 +1,46 @@
import { UnauthorizedException } from '@nestjs/common';

type CasbinUser = {
    id?: unknown;
    roles?: unknown;
};

/**
 * Determine the Casbin subject for a request user.
 *
 * Prefers a non-empty `user.id`, otherwise falls back to a single non-empty role.
 * Throws when the subject cannot be resolved.
 */
export function resolveSubjectFromUser(user: CasbinUser | undefined): string {
    if (!user) {
        throw new UnauthorizedException('Request user context missing');
    }

    const roles = user.roles ?? [];

    if (!Array.isArray(roles)) {
        throw new UnauthorizedException('User roles must be an array');
    }

    const userId = typeof user.id === 'string' ? user.id.trim() : '';

    if (userId.length > 0) {
        return userId;
    }

    if (roles.length === 1) {
        const [role] = roles;

        if (typeof role === 'string') {
            const trimmedRole = role.trim();

            if (trimmedRole.length > 0) {
                return trimmedRole;
            }
        }

        throw new UnauthorizedException('Role subject must be a non-empty string');
    }

    throw new UnauthorizedException('Unable to determine subject from user context');
}
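The resolution rules above, in miniature; inputs and outputs mirror the spec earlier in this diff:

resolveSubjectFromUser({ id: ' user-123 ', roles: ['viewer'] }); // 'user-123' (id wins)
resolveSubjectFromUser({ roles: [' viewer '] }); // 'viewer' (single-role fallback)
resolveSubjectFromUser({ roles: ['viewer', 'admin'] }); // throws UnauthorizedException
resolveSubjectFromUser(undefined); // throws UnauthorizedException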
@@ -36,6 +36,7 @@ const mockPluginManagementService = {
    addPlugin: vi.fn(),
    addBundledPlugin: vi.fn(),
    removePlugin: vi.fn(),
    removePluginConfigOnly: vi.fn(),
    removeBundledPlugin: vi.fn(),
    plugins: [] as string[],
};
@@ -147,6 +148,7 @@ describe('Plugin Commands', () => {
            '@unraid/plugin-example',
            '@unraid/plugin-test'
        );
        expect(mockPluginManagementService.removePluginConfigOnly).not.toHaveBeenCalled();
        expect(mockLogger.log).toHaveBeenCalledWith('Removed plugin @unraid/plugin-example');
        expect(mockLogger.log).toHaveBeenCalledWith('Removed plugin @unraid/plugin-test');
        expect(mockApiConfigPersistence.persist).toHaveBeenCalled();
@@ -178,9 +180,72 @@ describe('Plugin Commands', () => {
        expect(mockPluginManagementService.removePlugin).toHaveBeenCalledWith(
            '@unraid/plugin-example'
        );
        expect(mockPluginManagementService.removePluginConfigOnly).not.toHaveBeenCalled();
        expect(mockApiConfigPersistence.persist).toHaveBeenCalled();
        expect(mockRestartCommand.run).not.toHaveBeenCalled();
    });

    it('should bypass npm uninstall when bypass flag is provided', async () => {
        mockInquirerService.prompt.mockResolvedValue({
            plugins: ['@unraid/plugin-example'],
            restart: true,
            bypassNpm: true,
        });

        await command.run([], { restart: true, bypassNpm: true });

        expect(mockPluginManagementService.removePluginConfigOnly).toHaveBeenCalledWith(
            '@unraid/plugin-example'
        );
        expect(mockPluginManagementService.removePlugin).not.toHaveBeenCalled();
    });

    it('should preserve cli flags when prompt supplies plugins', async () => {
        mockInquirerService.prompt.mockResolvedValue({
            plugins: ['@unraid/plugin-example'],
        });

        await command.run([], { restart: false, bypassNpm: true });

        expect(mockPluginManagementService.removePluginConfigOnly).toHaveBeenCalledWith(
            '@unraid/plugin-example'
        );
        expect(mockPluginManagementService.removePlugin).not.toHaveBeenCalled();
        expect(mockRestartCommand.run).not.toHaveBeenCalled();
    });

    it('should honor prompt restart value when cli flag not provided', async () => {
        mockInquirerService.prompt.mockResolvedValue({
            plugins: ['@unraid/plugin-example'],
            restart: false,
        });

        await command.run([], {});

        expect(mockPluginManagementService.removePlugin).toHaveBeenCalledWith(
            '@unraid/plugin-example'
        );
        expect(mockRestartCommand.run).not.toHaveBeenCalled();
    });

    it('should respect passed params and skip inquirer', async () => {
        await command.run(['@unraid/plugin-example'], { restart: true, bypassNpm: false });

        expect(mockInquirerService.prompt).not.toHaveBeenCalled();
        expect(mockPluginManagementService.removePlugin).toHaveBeenCalledWith(
            '@unraid/plugin-example'
        );
    });

    it('should bypass npm when flag provided with passed params', async () => {
        await command.run(['@unraid/plugin-example'], { restart: true, bypassNpm: true });

        expect(mockInquirerService.prompt).not.toHaveBeenCalled();
        expect(mockPluginManagementService.removePluginConfigOnly).toHaveBeenCalledWith(
            '@unraid/plugin-example'
        );
        expect(mockPluginManagementService.removePlugin).not.toHaveBeenCalled();
    });
});

describe('ListPluginCommand', () => {

@@ -26,10 +26,10 @@ const mockApiReportService = {
    generateReport: vi.fn(),
};

// Mock PM2 check
// Mock process manager check
const mockIsUnraidApiRunning = vi.fn().mockResolvedValue(true);

vi.mock('@app/core/utils/pm2/unraid-api-running.js', () => ({
vi.mock('@app/core/utils/process/unraid-api-running.js', () => ({
    isUnraidApiRunning: () => mockIsUnraidApiRunning(),
}));

@@ -50,7 +50,7 @@ describe('ReportCommand', () => {
        // Clear mocks
        vi.clearAllMocks();

        // Reset PM2 mock to default
        // Reset nodemon mock to default
        mockIsUnraidApiRunning.mockResolvedValue(true);
    });

@@ -150,7 +150,7 @@ describe('ReportCommand', () => {
        // Reset mocks
        vi.clearAllMocks();

        // Test with API running but PM2 check returns true
        // Test with API running but status check returns true
        mockIsUnraidApiRunning.mockResolvedValue(true);
        await reportCommand.report();
        expect(mockApiReportService.generateReport).toHaveBeenCalledWith(true);

@@ -4,7 +4,7 @@ import { DependencyService } from '@app/unraid-api/app/dependency.service.js';
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
import { ApiReportService } from '@app/unraid-api/cli/api-report.service.js';
import { LogService } from '@app/unraid-api/cli/log.service.js';
import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';
import { NodemonService } from '@app/unraid-api/cli/nodemon.service.js';
import { ApiConfigModule } from '@app/unraid-api/config/api-config.module.js';
import { LegacyConfigModule } from '@app/unraid-api/config/legacy-config.module.js';
import { GlobalDepsModule } from '@app/unraid-api/plugin/global-deps.module.js';
@@ -21,7 +21,7 @@ import { UnraidFileModifierModule } from '@app/unraid-api/unraid-file-modifier/u
        PluginCliModule.register(),
        UnraidFileModifierModule,
    ],
    providers: [LogService, PM2Service, ApiKeyService, DependencyService, ApiReportService],
    providers: [LogService, NodemonService, ApiKeyService, DependencyService, ApiReportService],
    exports: [ApiReportService, LogService, ApiKeyService],
})
export class CliServicesModule {}

@@ -13,6 +13,7 @@ import { DeveloperCommand } from '@app/unraid-api/cli/developer/developer.comman
import { DeveloperQuestions } from '@app/unraid-api/cli/developer/developer.questions.js';
import { LogService } from '@app/unraid-api/cli/log.service.js';
import { LogsCommand } from '@app/unraid-api/cli/logs.command.js';
import { NodemonService } from '@app/unraid-api/cli/nodemon.service.js';
import {
    InstallPluginCommand,
    ListPluginCommand,
@@ -20,7 +21,6 @@ import {
    RemovePluginCommand,
} from '@app/unraid-api/cli/plugins/plugin.command.js';
import { RemovePluginQuestionSet } from '@app/unraid-api/cli/plugins/remove-plugin.questions.js';
import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';
import { ReportCommand } from '@app/unraid-api/cli/report.command.js';
import { RestartCommand } from '@app/unraid-api/cli/restart.command.js';
import { SSOCommand } from '@app/unraid-api/cli/sso/sso.command.js';
@@ -64,7 +64,7 @@ const DEFAULT_PROVIDERS = [
    DeveloperQuestions,
    DeveloperToolsService,
    LogService,
    PM2Service,
    NodemonService,
    ApiKeyService,
    DependencyService,
    ApiReportService,

@@ -559,6 +559,17 @@ export type CpuLoad = {
  percentUser: Scalars['Float']['output'];
};

export type CpuPackages = Node & {
  __typename?: 'CpuPackages';
  id: Scalars['PrefixedID']['output'];
  /** Power draw per package (W) */
  power: Array<Scalars['Float']['output']>;
  /** Temperature per package (°C) */
  temp: Array<Scalars['Float']['output']>;
  /** Total CPU package power draw (W) */
  totalPower: Scalars['Float']['output'];
};

export type CpuUtilization = Node & {
  __typename?: 'CpuUtilization';
  /** CPU load for each core */
@@ -869,6 +880,7 @@ export type InfoCpu = Node & {
  manufacturer?: Maybe<Scalars['String']['output']>;
  /** CPU model */
  model?: Maybe<Scalars['String']['output']>;
  packages: CpuPackages;
  /** Number of physical processors */
  processors?: Maybe<Scalars['Int']['output']>;
  /** CPU revision */
@@ -885,6 +897,8 @@ export type InfoCpu = Node & {
  stepping?: Maybe<Scalars['Int']['output']>;
  /** Number of CPU threads */
  threads?: Maybe<Scalars['Int']['output']>;
  /** Per-package array of core/thread pairs, e.g. [[[0,1],[2,3]], [[4,5],[6,7]]] */
  topology: Array<Array<Array<Scalars['Int']['output']>>>;
  /** CPU vendor */
  vendor?: Maybe<Scalars['String']['output']>;
  /** CPU voltage */
@@ -1531,14 +1545,14 @@ export type PackageVersions = {
  nginx?: Maybe<Scalars['String']['output']>;
  /** Node.js version */
  node?: Maybe<Scalars['String']['output']>;
  /** nodemon version */
  nodemon?: Maybe<Scalars['String']['output']>;
  /** npm version */
  npm?: Maybe<Scalars['String']['output']>;
  /** OpenSSL version */
  openssl?: Maybe<Scalars['String']['output']>;
  /** PHP version */
  php?: Maybe<Scalars['String']['output']>;
  /** pm2 version */
  pm2?: Maybe<Scalars['String']['output']>;
};

export type ParityCheck = {
@@ -2053,6 +2067,7 @@ export type Subscription = {
  parityHistorySubscription: ParityCheck;
  serversSubscription: Server;
  systemMetricsCpu: CpuUtilization;
  systemMetricsCpuTelemetry: CpuPackages;
  systemMetricsMemory: MemoryUtilization;
  upsUpdates: UpsDevice;
};

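For illustration, the new telemetry surface from a client's point of view. A hedged sketch written as a TypeScript document string, since the diff shows the generated type but not a client query:

// Hypothetical client-side subscription document for the new field.
const CPU_TELEMETRY_SUBSCRIPTION = /* GraphQL */ `
    subscription CpuTelemetry {
        systemMetricsCpuTelemetry {
            id
            power # per-package power draw (W)
            temp # per-package temperature (°C)
            totalPower # total package power draw (W)
        }
    }
`;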
@@ -1,6 +1,6 @@
import { Command, CommandRunner, Option } from 'nest-commander';

import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';
import { NodemonService } from '@app/unraid-api/cli/nodemon.service.js';

interface LogsOptions {
    lines: number;
@@ -8,7 +8,7 @@ interface LogsOptions {

@Command({ name: 'logs', description: 'View logs' })
export class LogsCommand extends CommandRunner {
    constructor(private readonly pm2: PM2Service) {
    constructor(private readonly nodemon: NodemonService) {
        super();
    }

@@ -20,13 +20,6 @@ export class LogsCommand extends CommandRunner {

    async run(_: string[], options?: LogsOptions): Promise<void> {
        const lines = options?.lines ?? 100;
        await this.pm2.run(
            { tag: 'PM2 Logs', stdio: 'inherit' },
            'logs',
            'unraid-api',
            '--lines',
            lines.toString(),
            '--raw'
        );
        await this.nodemon.logs(lines);
    }
}

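Per the NodemonService spec further below, logs(n) shells out to tail, so the rewritten command above reduces to roughly this sketch; the log path comes from the mocked environment in that spec:

import { execa } from 'execa';

// Approximate effect of viewing the last 50 log lines after this change.
const { stdout } = await execa('tail', ['-n', '50', '/var/log/graphql-api.log']);
console.log(stdout);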
142
api/src/unraid-api/cli/nodemon.service.integration.spec.ts
Normal file
@@ -0,0 +1,142 @@
import { mkdtemp, readFile, rm, stat, writeFile } from 'node:fs/promises';
import { tmpdir } from 'node:os';
import { join } from 'node:path';

import { afterAll, beforeAll, describe, expect, it, vi } from 'vitest';

import { LogService } from '@app/unraid-api/cli/log.service.js';

const logger = {
    clear: vi.fn(),
    shouldLog: vi.fn(() => true),
    table: vi.fn(),
    trace: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    log: vi.fn(),
    info: vi.fn(),
    debug: vi.fn(),
    always: vi.fn(),
} as unknown as LogService;

describe('NodemonService (real nodemon)', () => {
    const tmpRoot = join(tmpdir(), 'nodemon-service-');
    let workdir: string;
    let scriptPath: string;
    let configPath: string;
    let appLogPath: string;
    let nodemonLogPath: string;
    let pidPath: string;
    const nodemonPath = join(process.cwd(), 'node_modules', 'nodemon', 'bin', 'nodemon.js');

    beforeAll(async () => {
        workdir = await mkdtemp(tmpRoot);
        scriptPath = join(workdir, 'app.js');
        configPath = join(workdir, 'nodemon.json');
        appLogPath = join(workdir, 'app.log');
        nodemonLogPath = join(workdir, 'nodemon.log');
        pidPath = join(workdir, 'nodemon.pid');

        await writeFile(
            scriptPath,
            [
                "const { appendFileSync } = require('node:fs');",
                "const appLog = process.env.PATHS_LOGS_FILE || './app.log';",
                "const nodemonLog = process.env.PATHS_NODEMON_LOG_FILE || './nodemon.log';",
                "appendFileSync(appLog, 'app-log-entry\\n');",
                "appendFileSync(nodemonLog, 'nodemon-log-entry\\n');",
                "console.log('nodemon-integration-start');",
                'setInterval(() => {}, 1000);',
            ].join('\n')
        );

        await writeFile(
            configPath,
            JSON.stringify(
                {
                    watch: ['app.js'],
                    exec: 'node ./app.js',
                    signal: 'SIGTERM',
                    ext: 'js',
                },
                null,
                2
            )
        );
    });

    afterAll(async () => {
        await rm(workdir, { recursive: true, force: true });
    });

    it('starts and stops real nodemon and writes logs', async () => {
        vi.resetModules();
        vi.doMock('@app/environment.js', () => ({
            LOG_LEVEL: 'INFO',
            LOG_TYPE: 'pretty',
            SUPPRESS_LOGS: false,
            API_VERSION: 'test-version',
            NODEMON_CONFIG_PATH: configPath,
            NODEMON_LOCK_PATH: join(workdir, 'nodemon.lock'),
            NODEMON_PATH: nodemonPath,
            NODEMON_PID_PATH: pidPath,
            PATHS_LOGS_DIR: workdir,
            PATHS_LOGS_FILE: appLogPath,
            PATHS_NODEMON_LOG_FILE: nodemonLogPath,
            UNRAID_API_CWD: workdir,
            UNRAID_API_SERVER_ENTRYPOINT: join(workdir, 'app.js'),
        }));

        const { NodemonService } = await import('./nodemon.service.js');
        const service = new NodemonService(logger);

        await service.start();

        const pidText = (await readFile(pidPath, 'utf-8')).trim();
        const pid = Number.parseInt(pidText, 10);
        expect(Number.isInteger(pid) && pid > 0).toBe(true);

        const nodemonLogStats = await stat(nodemonLogPath);
        expect(nodemonLogStats.isFile()).toBe(true);
        await waitForLogEntry(nodemonLogPath, 'Starting nodemon');
        await waitForLogEntry(appLogPath, 'app-log-entry');

        await service.stop();
        await waitForExit(pid);
        await expect(stat(pidPath)).rejects.toThrow();
    }, 20_000);
});

async function waitForLogEntry(path: string, needle: string, timeoutMs = 5000) {
    const deadline = Date.now() + timeoutMs;

    while (true) {
        try {
            const contents = await readFile(path, 'utf-8');
            if (contents.includes(needle)) return contents;
        } catch {
            // ignore until timeout
        }

        if (Date.now() > deadline) {
            throw new Error(`Log entry "${needle}" not found in ${path} within ${timeoutMs}ms`);
        }
        await new Promise((resolve) => setTimeout(resolve, 100));
    }
}

async function waitForExit(pid: number, timeoutMs = 5000) {
    const deadline = Date.now() + timeoutMs;

    while (true) {
        try {
            process.kill(pid, 0);
        } catch {
            return;
        }
        if (Date.now() > deadline) {
            throw new Error(`Process ${pid} did not exit within ${timeoutMs}ms`);
        }
        await new Promise((resolve) => setTimeout(resolve, 100));
    }
}
569
api/src/unraid-api/cli/nodemon.service.spec.ts
Normal file
@@ -0,0 +1,569 @@
import { spawn } from 'node:child_process';
import { createWriteStream, openSync } from 'node:fs';
import * as fs from 'node:fs/promises';

import { execa } from 'execa';
import { beforeEach, describe, expect, it, vi } from 'vitest';

import { fileExists, fileExistsSync } from '@app/core/utils/files/file-exists.js';
import { NodemonService } from '@app/unraid-api/cli/nodemon.service.js';

const createLogStreamMock = (fd = 42, autoOpen = true) => {
    const listeners: Record<string, Array<(...args: any[]) => void>> = {};
    const stream: any = {
        fd,
        close: vi.fn(),
        destroy: vi.fn(),
        write: vi.fn(),
        once: vi.fn(),
        off: vi.fn(),
    };

    stream.once.mockImplementation((event: string, cb: (...args: any[]) => void) => {
        listeners[event] = listeners[event] ?? [];
        listeners[event].push(cb);
        if (event === 'open' && autoOpen) cb();
        return stream;
    });
    stream.off.mockImplementation((event: string, cb: (...args: any[]) => void) => {
        listeners[event] = (listeners[event] ?? []).filter((fn) => fn !== cb);
        return stream;
    });
    stream.emit = (event: string, ...args: any[]) => {
        (listeners[event] ?? []).forEach((fn) => fn(...args));
    };

    return stream as ReturnType<typeof createWriteStream> & {
        emit: (event: string, ...args: any[]) => void;
    };
};

const createSpawnMock = (pid?: number) => {
    const unref = vi.fn();
    return {
        pid,
        unref,
    } as unknown as ReturnType<typeof spawn>;
};

vi.mock('node:child_process', () => ({
    spawn: vi.fn(),
}));
vi.mock('node:fs', () => ({
    createWriteStream: vi.fn(),
    openSync: vi.fn().mockReturnValue(42),
    writeSync: vi.fn(),
}));
vi.mock('node:fs/promises', async (importOriginal) => {
    const actual = await importOriginal<typeof fs>();
    return {
        ...actual,
        mkdir: vi.fn(),
        writeFile: vi.fn(),
        rm: vi.fn(),
        readFile: vi.fn(),
        appendFile: vi.fn(),
    };
});
vi.mock('execa', () => ({ execa: vi.fn() }));
vi.mock('proper-lockfile', () => ({
    lock: vi.fn().mockResolvedValue(vi.fn().mockResolvedValue(undefined)),
}));
vi.mock('@app/core/utils/files/file-exists.js', () => ({
    fileExists: vi.fn().mockResolvedValue(false),
    fileExistsSync: vi.fn().mockReturnValue(true),
}));
vi.mock('@app/environment.js', () => ({
    LOG_LEVEL: 'INFO',
    SUPPRESS_LOGS: false,
    NODEMON_CONFIG_PATH: '/etc/unraid-api/nodemon.json',
    NODEMON_LOCK_PATH: '/var/run/unraid-api/nodemon.lock',
    NODEMON_PATH: '/usr/bin/nodemon',
    NODEMON_PID_PATH: '/var/run/unraid-api/nodemon.pid',
    PATHS_LOGS_DIR: '/var/log/unraid-api',
    PATHS_LOGS_FILE: '/var/log/graphql-api.log',
    PATHS_NODEMON_LOG_FILE: '/var/log/unraid-api/nodemon.log',
    UNRAID_API_CWD: '/usr/local/unraid-api',
    UNRAID_API_SERVER_ENTRYPOINT: '/usr/local/unraid-api/dist/main.js',
}));

describe('NodemonService', () => {
    const logger = {
        trace: vi.fn(),
        warn: vi.fn(),
        error: vi.fn(),
        log: vi.fn(),
        info: vi.fn(),
        debug: vi.fn(),
    } as unknown as NodemonService['logger'];

    const mockMkdir = vi.mocked(fs.mkdir);
    const mockWriteFile = vi.mocked(fs.writeFile);
    const mockRm = vi.mocked(fs.rm);
    const killSpy = vi.spyOn(process, 'kill');
    const stopPm2Spy = vi.spyOn(
        NodemonService.prototype as unknown as { stopPm2IfRunning: () => Promise<void> },
        'stopPm2IfRunning'
    );
    const findMatchingSpy = vi.spyOn(
        NodemonService.prototype as unknown as { findMatchingNodemonPids: () => Promise<number[]> },
        'findMatchingNodemonPids'
    );
    const findDirectMainSpy = vi.spyOn(
        NodemonService.prototype as unknown as { findDirectMainPids: () => Promise<number[]> },
        'findDirectMainPids'
    );
    const terminateSpy = vi.spyOn(
        NodemonService.prototype as unknown as { terminatePids: (pids: number[]) => Promise<void> },
        'terminatePids'
    );

    beforeEach(() => {
        vi.clearAllMocks();
        vi.mocked(createWriteStream).mockImplementation(() => createLogStreamMock());
        vi.mocked(openSync).mockReturnValue(42);
        vi.mocked(spawn).mockReturnValue(createSpawnMock(123));
        mockMkdir.mockResolvedValue(undefined);
        mockWriteFile.mockResolvedValue(undefined as unknown as void);
        mockRm.mockResolvedValue(undefined as unknown as void);
        vi.mocked(fileExists).mockResolvedValue(false);
        vi.mocked(fileExistsSync).mockReturnValue(true);
        killSpy.mockReturnValue(true);
        findMatchingSpy.mockResolvedValue([]);
        findDirectMainSpy.mockResolvedValue([]);
        terminateSpy.mockResolvedValue();
        stopPm2Spy.mockResolvedValue();
    });

    it('ensures directories needed by nodemon exist', async () => {
        const service = new NodemonService(logger);

        await service.ensureNodemonDependencies();

        expect(mockMkdir).toHaveBeenCalledWith('/var/log/unraid-api', { recursive: true });
        expect(mockMkdir).toHaveBeenCalledWith('/var/log', { recursive: true });
        expect(mockMkdir).toHaveBeenCalledWith('/var/run/unraid-api', { recursive: true });
    });

    it('throws error when directory creation fails', async () => {
        const service = new NodemonService(logger);
        const error = new Error('Permission denied');
        mockMkdir.mockRejectedValue(error);

        await expect(service.ensureNodemonDependencies()).rejects.toThrow('Permission denied');
        expect(mockMkdir).toHaveBeenCalledWith('/var/log/unraid-api', { recursive: true });
    });

    it('starts nodemon and writes pid file', async () => {
        const service = new NodemonService(logger);
        const spawnMock = createSpawnMock(123);
        vi.mocked(spawn).mockReturnValue(spawnMock);
        killSpy.mockReturnValue(true);
        findMatchingSpy.mockResolvedValue([]);

        await service.start({ env: { LOG_LEVEL: 'DEBUG' } });

        expect(stopPm2Spy).toHaveBeenCalled();
        expect(spawn).toHaveBeenCalledWith(
            process.execPath,
            ['/usr/bin/nodemon', '--config', '/etc/unraid-api/nodemon.json', '--quiet'],
            {
                cwd: '/usr/local/unraid-api',
                env: expect.objectContaining({ LOG_LEVEL: 'DEBUG' }),
                detached: true,
                stdio: ['ignore', 42, 42],
            }
        );
        expect(openSync).toHaveBeenCalledWith('/var/log/unraid-api/nodemon.log', 'a');
        expect(spawnMock.unref).toHaveBeenCalled();
        expect(mockWriteFile).toHaveBeenCalledWith('/var/run/unraid-api/nodemon.pid', '123');
        expect(logger.info).toHaveBeenCalledWith('Started nodemon (pid 123)');
    });

    it('throws error and aborts start when directory creation fails', async () => {
        const service = new NodemonService(logger);
        const error = new Error('Permission denied');
        mockMkdir.mockRejectedValue(error);

        await expect(service.start()).rejects.toThrow('Permission denied');
        expect(logger.error).toHaveBeenCalledWith(
            'Failed to ensure nodemon dependencies: Permission denied'
        );
        expect(spawn).not.toHaveBeenCalled();
    });

    it('throws error when spawn fails', async () => {
        const service = new NodemonService(logger);
        const error = new Error('Command not found');
        vi.mocked(spawn).mockImplementation(() => {
            throw error;
        });

        await expect(service.start()).rejects.toThrow('Failed to start nodemon: Command not found');
        expect(mockWriteFile).not.toHaveBeenCalledWith(
            '/var/run/unraid-api/nodemon.pid',
            expect.anything()
        );
        expect(logger.info).not.toHaveBeenCalled();
    });

    it('throws a clear error when the log file cannot be opened', async () => {
        const service = new NodemonService(logger);
        const openError = new Error('EACCES: permission denied');
        vi.mocked(openSync).mockImplementation(() => {
            throw openError;
        });

        await expect(service.start()).rejects.toThrow(
            'Failed to start nodemon: EACCES: permission denied'
        );
        expect(spawn).not.toHaveBeenCalled();
    });

    it('throws error when pid is missing', async () => {
        const service = new NodemonService(logger);
        const spawnMock = createSpawnMock(undefined);
        vi.mocked(spawn).mockReturnValue(spawnMock);

        await expect(service.start()).rejects.toThrow(
            'Failed to start nodemon: process spawned but no PID was assigned'
        );
        expect(mockWriteFile).not.toHaveBeenCalledWith(
            '/var/run/unraid-api/nodemon.pid',
            expect.anything()
        );
        expect(logger.info).not.toHaveBeenCalled();
    });

    it('throws when nodemon exits immediately after start', async () => {
        const service = new NodemonService(logger);
        const spawnMock = createSpawnMock(456);
        vi.mocked(spawn).mockReturnValue(spawnMock);
        killSpy.mockImplementation(() => {
            throw new Error('not running');
        });
        const logsSpy = vi.spyOn(service, 'logs').mockResolvedValue('recent log lines');

        await expect(service.start()).rejects.toThrow(/Nodemon exited immediately/);
        expect(mockRm).toHaveBeenCalledWith('/var/run/unraid-api/nodemon.pid', { force: true });
        expect(logsSpy).toHaveBeenCalledWith(50);
    });

    it('restarts when a recorded nodemon pid is already running', async () => {
        const service = new NodemonService(logger);
        const stopSpy = vi.spyOn(service, 'stop').mockResolvedValue();
        vi.spyOn(
            service as unknown as { waitForNodemonExit: () => Promise<void> },
            'waitForNodemonExit'
        ).mockResolvedValue();
        vi.spyOn(
            service as unknown as { getStoredPid: () => Promise<number | null> },
            'getStoredPid'
        ).mockResolvedValue(999);
        vi.spyOn(
            service as unknown as { isPidRunning: (pid: number) => Promise<boolean> },
            'isPidRunning'
        ).mockResolvedValue(true);

        const spawnMock = createSpawnMock(456);
        vi.mocked(spawn).mockReturnValue(spawnMock);

        await service.start();

        expect(stopSpy).toHaveBeenCalledWith({ quiet: true });
        expect(mockRm).toHaveBeenCalledWith('/var/run/unraid-api/nodemon.pid', { force: true });
        expect(spawn).toHaveBeenCalled();
        expect(logger.info).toHaveBeenCalledWith(
            'unraid-api already running under nodemon (pid 999); restarting for a fresh start.'
        );
    });

    it('removes stale pid file and starts when recorded pid is dead', async () => {
        const service = new NodemonService(logger);
        const spawnMock = createSpawnMock(111);
        vi.mocked(spawn).mockReturnValue(spawnMock);
        vi.spyOn(
            service as unknown as { getStoredPid: () => Promise<number | null> },
            'getStoredPid'
        ).mockResolvedValue(555);
        vi.spyOn(
            service as unknown as { isPidRunning: (pid: number) => Promise<boolean> },
            'isPidRunning'
        )
            .mockResolvedValueOnce(false)
            .mockResolvedValue(true);
        vi.spyOn(service, 'logs').mockResolvedValue('recent log lines');
        findMatchingSpy.mockResolvedValue([]);

        await service.start();

        expect(mockRm).toHaveBeenCalledWith('/var/run/unraid-api/nodemon.pid', { force: true });
        expect(spawn).toHaveBeenCalled();
        expect(mockWriteFile).toHaveBeenCalledWith('/var/run/unraid-api/nodemon.pid', '111');
        expect(logger.warn).toHaveBeenCalledWith(
            'Found nodemon pid file (555) but the process is not running. Cleaning up.'
        );
    });

    it('cleans up stray nodemon when no pid file exists', async () => {
        const service = new NodemonService(logger);
        findMatchingSpy.mockResolvedValue([888]);
        vi.spyOn(
            service as unknown as { isPidRunning: (pid: number) => Promise<boolean> },
            'isPidRunning'
        ).mockResolvedValue(true);
        vi.spyOn(
            service as unknown as { waitForNodemonExit: () => Promise<void> },
            'waitForNodemonExit'
        ).mockResolvedValue();

        const spawnMock = createSpawnMock(222);
        vi.mocked(spawn).mockReturnValue(spawnMock);

        await service.start();

        expect(terminateSpy).toHaveBeenCalledWith([888]);
        expect(spawn).toHaveBeenCalled();
    });

    it('terminates direct main.js processes before starting nodemon', async () => {
        const service = new NodemonService(logger);
        findMatchingSpy.mockResolvedValue([]);
        findDirectMainSpy.mockResolvedValue([321, 654]);

        const spawnMock = createSpawnMock(777);
        vi.mocked(spawn).mockReturnValue(spawnMock);

        await service.start();

        expect(terminateSpy).toHaveBeenCalledWith([321, 654]);
        expect(spawn).toHaveBeenCalledWith(
            process.execPath,
            ['/usr/bin/nodemon', '--config', '/etc/unraid-api/nodemon.json', '--quiet'],
            expect.objectContaining({ cwd: '/usr/local/unraid-api' })
        );
    });

    it('returns not running when pid file is missing and no orphans', async () => {
        const service = new NodemonService(logger);
        vi.mocked(fileExists).mockResolvedValue(false);
        findMatchingSpy.mockResolvedValue([]);
        findDirectMainSpy.mockResolvedValue([]);

        const result = await service.status();

        expect(result).toBe(false);
        expect(logger.info).toHaveBeenCalledWith('unraid-api is not running (no pid file).');
    });

    it('returns running and warns when orphan processes found without pid file', async () => {
        const service = new NodemonService(logger);
        vi.mocked(fileExists).mockResolvedValue(false);
        findMatchingSpy.mockResolvedValue([]);
        findDirectMainSpy.mockResolvedValue([123, 456]);

        const result = await service.status();

        expect(result).toBe(true);
        expect(logger.warn).toHaveBeenCalledWith(
            'No PID file, but found orphaned processes: nodemon=none, main.js=123,456'
        );
    });

    it('returns running and warns when orphan nodemon found without pid file', async () => {
        const service = new NodemonService(logger);
        vi.mocked(fileExists).mockResolvedValue(false);
        findMatchingSpy.mockResolvedValue([789]);
        findDirectMainSpy.mockResolvedValue([]);

        const result = await service.status();

        expect(result).toBe(true);
        expect(logger.warn).toHaveBeenCalledWith(
            'No PID file, but found orphaned processes: nodemon=789, main.js=none'
        );
    });

    it('stop: sends SIGTERM to nodemon and waits for exit', async () => {
        const service = new NodemonService(logger);
        vi.mocked(fileExists).mockResolvedValue(true);
        vi.mocked(fs.readFile).mockResolvedValue('100');
        findDirectMainSpy.mockResolvedValue([200]);
        const waitForPidsToExitSpy = vi
            .spyOn(
                service as unknown as {
                    waitForPidsToExit: (pids: number[], timeoutMs?: number) => Promise<number[]>;
                },
                'waitForPidsToExit'
            )
            .mockResolvedValue([]);

        await service.stop();

        expect(killSpy).toHaveBeenCalledWith(100, 'SIGTERM');
        expect(waitForPidsToExitSpy).toHaveBeenCalledWith([100, 200], 5000);
        expect(mockRm).toHaveBeenCalledWith('/var/run/unraid-api/nodemon.pid', { force: true });
    });

    it('stop: force kills remaining processes after timeout', async () => {
        const service = new NodemonService(logger);
        vi.mocked(fileExists).mockResolvedValue(true);
        vi.mocked(fs.readFile).mockResolvedValue('100');
        findDirectMainSpy.mockResolvedValue([200]);
        vi.spyOn(
            service as unknown as {
                waitForPidsToExit: (pids: number[], timeoutMs?: number) => Promise<number[]>;
            },
            'waitForPidsToExit'
        ).mockResolvedValue([100, 200]);
        const terminatePidsWithForceSpy = vi
            .spyOn(
                service as unknown as {
                    terminatePidsWithForce: (pids: number[], gracePeriodMs?: number) => Promise<void>;
                },
                'terminatePidsWithForce'
            )
            .mockResolvedValue();

        await service.stop();

        expect(logger.warn).toHaveBeenCalledWith('Force killing remaining processes: 100, 200');
        expect(terminatePidsWithForceSpy).toHaveBeenCalledWith([100, 200]);
    });

    it('stop: cleans up orphaned main.js when no pid file exists', async () => {
        const service = new NodemonService(logger);
        vi.mocked(fileExists).mockResolvedValue(false);
        findDirectMainSpy.mockResolvedValue([300, 400]);
        const terminatePidsWithForceSpy = vi
            .spyOn(
                service as unknown as {
                    terminatePidsWithForce: (pids: number[], gracePeriodMs?: number) => Promise<void>;
                },
                'terminatePidsWithForce'
            )
            .mockResolvedValue();

        await service.stop();

        expect(logger.warn).toHaveBeenCalledWith('No nodemon pid file found.');
        expect(logger.warn).toHaveBeenCalledWith(
            'Found orphaned main.js processes: 300, 400. Terminating.'
        );
        expect(terminatePidsWithForceSpy).toHaveBeenCalledWith([300, 400]);
    });

    it('stop --force: skips graceful wait', async () => {
        const service = new NodemonService(logger);
        vi.mocked(fileExists).mockResolvedValue(true);
        vi.mocked(fs.readFile).mockResolvedValue('100');
        findDirectMainSpy.mockResolvedValue([]);
        const waitForPidsToExitSpy = vi
            .spyOn(
                service as unknown as {
                    waitForPidsToExit: (pids: number[], timeoutMs?: number) => Promise<number[]>;
                },
                'waitForPidsToExit'
            )
            .mockResolvedValue([100]);
        vi.spyOn(
            service as unknown as {
                terminatePidsWithForce: (pids: number[], gracePeriodMs?: number) => Promise<void>;
            },
            'terminatePidsWithForce'
        ).mockResolvedValue();

        await service.stop({ force: true });

        expect(waitForPidsToExitSpy).toHaveBeenCalledWith([100], 0);
    });

    it('logs stdout when tail succeeds', async () => {
        const service = new NodemonService(logger);
        vi.mocked(execa).mockResolvedValue({
            stdout: 'log line 1\nlog line 2',
        } as unknown as Awaited<ReturnType<typeof execa>>);

        const result = await service.logs(50);

        expect(execa).toHaveBeenCalledWith('tail', ['-n', '50', '/var/log/graphql-api.log']);
        expect(logger.log).toHaveBeenCalledWith('log line 1\nlog line 2');
        expect(result).toBe('log line 1\nlog line 2');
    });

    it('handles ENOENT error when log file is missing', async () => {
        const service = new NodemonService(logger);
        const error = new Error('ENOENT: no such file or directory');
        (error as Error & { code?: string }).code = 'ENOENT';
        vi.mocked(execa).mockRejectedValue(error);

        const result = await service.logs();

        expect(logger.error).toHaveBeenCalledWith(
            'Log file not found: /var/log/graphql-api.log (ENOENT: no such file or directory)'
        );
        expect(result).toBe('');
    });

    it('handles non-zero exit error from tail', async () => {
        const service = new NodemonService(logger);
        const error = new Error('Command failed with exit code 1');
        vi.mocked(execa).mockRejectedValue(error);

        const result = await service.logs(100);

        expect(logger.error).toHaveBeenCalledWith(
            'Failed to read logs from /var/log/graphql-api.log: Command failed with exit code 1'
        );
        expect(result).toBe('');
    });

    it('waits for nodemon to exit during restart before starting again', async () => {
        const service = new NodemonService(logger);
        const stopSpy = vi.spyOn(service, 'stop').mockResolvedValue();
        const waitSpy = vi
            .spyOn(
                service as unknown as { waitForNodemonExit: () => Promise<void> },
                'waitForNodemonExit'
            )
            .mockResolvedValue();
        vi.spyOn(
            service as unknown as { getStoredPid: () => Promise<number | null> },
            'getStoredPid'
        ).mockResolvedValue(123);
        vi.spyOn(
            service as unknown as { isPidRunning: (pid: number) => Promise<boolean> },
            'isPidRunning'
        ).mockResolvedValue(true);
        const spawnMock = createSpawnMock(456);
        vi.mocked(spawn).mockReturnValue(spawnMock);

        await service.restart({ env: { LOG_LEVEL: 'DEBUG' } });

        expect(stopSpy).toHaveBeenCalledWith({ quiet: true });
        expect(waitSpy).toHaveBeenCalled();
        expect(spawn).toHaveBeenCalled();
    });

    it('performs clean start on restart when nodemon is not running', async () => {
        const service = new NodemonService(logger);
        const stopSpy = vi.spyOn(service, 'stop').mockResolvedValue();
        const startSpy = vi.spyOn(service, 'start').mockResolvedValue();
        const waitSpy = vi
            .spyOn(
                service as unknown as { waitForNodemonExit: () => Promise<void> },
                'waitForNodemonExit'
            )
            .mockResolvedValue();
        vi.spyOn(
            service as unknown as { getStoredPid: () => Promise<number | null> },
            'getStoredPid'
        ).mockResolvedValue(null);

        await service.restart();

        expect(stopSpy).not.toHaveBeenCalled();
        expect(waitSpy).not.toHaveBeenCalled();
        expect(startSpy).toHaveBeenCalled();
    });
});
534
api/src/unraid-api/cli/nodemon.service.ts
Normal file
@@ -0,0 +1,534 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { spawn } from 'node:child_process';
|
||||
import { openSync, writeSync } from 'node:fs';
|
||||
import { appendFile, mkdir, readFile, rm, writeFile } from 'node:fs/promises';
|
||||
import { dirname } from 'node:path';
|
||||
|
||||
import { execa } from 'execa';
|
||||
import { lock } from 'proper-lockfile';
|
||||
|
||||
import { fileExists, fileExistsSync } from '@app/core/utils/files/file-exists.js';
|
||||
import {
|
||||
NODEMON_CONFIG_PATH,
|
||||
NODEMON_LOCK_PATH,
|
||||
NODEMON_PATH,
|
||||
NODEMON_PID_PATH,
|
||||
PATHS_LOGS_DIR,
|
||||
PATHS_LOGS_FILE,
|
||||
PATHS_NODEMON_LOG_FILE,
|
||||
UNRAID_API_CWD,
|
||||
UNRAID_API_SERVER_ENTRYPOINT,
|
||||
} from '@app/environment.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
|
||||
const LOCK_TIMEOUT_SECONDS = 30;
|
||||
|
||||
type StartOptions = {
|
||||
env?: Record<string, string | undefined>;
|
||||
};
|
||||
|
||||
type StopOptions = {
|
||||
/** When true, uses SIGKILL instead of SIGTERM */
|
||||
force?: boolean;
|
||||
/** Suppress warnings when there is no pid file */
|
||||
quiet?: boolean;
|
||||
};
|
||||
|
||||
const BOOT_LOG_PATH = '/var/log/unraid-api/boot.log';
|
||||
|
||||
@Injectable()
|
||||
export class NodemonService {
|
||||
constructor(private readonly logger: LogService) {}
|
||||
|
||||
private async logToBootFile(message: string): Promise<void> {
|
||||
const timestamp = new Date().toISOString();
|
||||
const line = `[${timestamp}] [nodemon-service] ${message}\n`;
|
||||
try {
|
||||
await appendFile(BOOT_LOG_PATH, line);
|
||||
} catch {
|
||||
// Fallback to console if file write fails (e.g., directory doesn't exist yet)
|
||||
}
|
||||
}
|
||||
|
||||
private validatePaths(): { valid: boolean; errors: string[] } {
|
||||
const errors: string[] = [];
|
||||
|
||||
if (!fileExistsSync(NODEMON_PATH)) {
|
||||
errors.push(`NODEMON_PATH does not exist: ${NODEMON_PATH}`);
|
||||
}
|
||||
if (!fileExistsSync(NODEMON_CONFIG_PATH)) {
|
||||
errors.push(`NODEMON_CONFIG_PATH does not exist: ${NODEMON_CONFIG_PATH}`);
|
||||
}
|
||||
if (!fileExistsSync(UNRAID_API_CWD)) {
|
||||
errors.push(`UNRAID_API_CWD does not exist: ${UNRAID_API_CWD}`);
|
||||
}
|
||||
|
||||
return { valid: errors.length === 0, errors };
|
||||
}
|
||||
|
||||
async ensureNodemonDependencies() {
|
||||
await mkdir(PATHS_LOGS_DIR, { recursive: true });
|
||||
await mkdir(dirname(PATHS_LOGS_FILE), { recursive: true });
|
||||
await mkdir(dirname(PATHS_NODEMON_LOG_FILE), { recursive: true });
|
||||
await mkdir(dirname(NODEMON_PID_PATH), { recursive: true });
|
||||
await mkdir(dirname(NODEMON_LOCK_PATH), { recursive: true });
|
||||
await writeFile(NODEMON_LOCK_PATH, '', { flag: 'a' });
|
||||
}
|
||||
|
||||
private async withLock<T>(fn: () => Promise<T>): Promise<T> {
|
||||
let release: (() => Promise<void>) | null = null;
|
||||
try {
|
||||
release = await lock(NODEMON_LOCK_PATH, {
|
||||
stale: LOCK_TIMEOUT_SECONDS * 1000,
|
||||
retries: {
|
||||
retries: Math.floor(LOCK_TIMEOUT_SECONDS * 10),
|
||||
factor: 1,
|
||||
minTimeout: 100,
|
||||
maxTimeout: 100,
|
||||
},
|
||||
});
|
||||
return await fn();
|
||||
} finally {
|
||||
if (release) {
|
||||
await release().catch(() => {});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async stopPm2IfRunning() {
|
||||
const pm2PidPath = '/var/log/.pm2/pm2.pid';
|
||||
if (!(await fileExists(pm2PidPath))) return;
|
||||
|
||||
const pm2Candidates = ['/usr/bin/pm2', '/usr/local/bin/pm2'];
|
||||
const pm2Path =
|
||||
(
|
||||
await Promise.all(
|
||||
pm2Candidates.map(async (candidate) =>
|
||||
(await fileExists(candidate)) ? candidate : null
|
||||
)
|
||||
)
|
||||
).find(Boolean) ?? null;
|
||||
|
||||
if (pm2Path) {
|
||||
try {
|
||||
const { stdout } = await execa(pm2Path, ['jlist']);
|
||||
const processes = JSON.parse(stdout);
|
||||
const hasUnraid =
|
||||
Array.isArray(processes) && processes.some((proc) => proc?.name === 'unraid-api');
|
||||
if (hasUnraid) {
|
||||
await execa(pm2Path, ['delete', 'unraid-api']);
|
||||
this.logger.info('Stopped pm2-managed unraid-api before starting nodemon.');
|
||||
}
|
||||
} catch (error) {
|
||||
// PM2 may not be installed or responding; keep this quiet to avoid noisy startup.
|
||||
this.logger.debug?.('Skipping pm2 cleanup (not installed or not running).');
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: directly kill the pm2 daemon and remove its state, even if pm2 binary is missing.
|
||||
try {
|
||||
const pidText = (await readFile(pm2PidPath, 'utf-8')).trim();
|
||||
const pid = Number.parseInt(pidText, 10);
|
||||
if (!Number.isNaN(pid)) {
|
||||
process.kill(pid, 'SIGTERM');
|
||||
this.logger.debug?.(`Sent SIGTERM to pm2 daemon (pid ${pid}).`);
|
||||
}
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
try {
|
||||
await rm('/var/log/.pm2', { recursive: true, force: true });
|
||||
} catch {
|
||||
// Ignore errors when removing pm2 state - shouldn't block API startup
|
||||
}
|
||||
}
|
||||
|
||||
private async getStoredPid(): Promise<number | null> {
|
||||
if (!(await fileExists(NODEMON_PID_PATH))) return null;
|
||||
const contents = (await readFile(NODEMON_PID_PATH, 'utf-8')).trim();
|
||||
const pid = Number.parseInt(contents, 10);
|
||||
return Number.isNaN(pid) ? null : pid;
|
||||
}
|
||||
|
||||
private async isPidRunning(pid: number): Promise<boolean> {
|
||||
try {
|
||||
process.kill(pid, 0);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private async findMatchingNodemonPids(): Promise<number[]> {
|
||||
try {
|
||||
const { stdout } = await execa('ps', ['-eo', 'pid,args']);
|
||||
return stdout
|
||||
.split('\n')
|
||||
.map((line) => line.trim())
|
||||
.map((line) => line.match(/^(\d+)\s+(.*)$/))
|
||||
.filter((match): match is RegExpMatchArray => Boolean(match))
|
||||
.map(([, pid, cmd]) => ({ pid: Number.parseInt(pid, 10), cmd }))
|
||||
.filter(({ cmd }) => cmd.includes('nodemon') && cmd.includes(NODEMON_CONFIG_PATH))
|
||||
.map(({ pid }) => pid)
|
||||
.filter((pid) => Number.isInteger(pid));
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
private async findDirectMainPids(): Promise<number[]> {
|
||||
try {
|
||||
const { stdout } = await execa('ps', ['-eo', 'pid,args']);
|
||||
return stdout
|
||||
.split('\n')
|
||||
.map((line) => line.trim())
|
||||
.map((line) => line.match(/^(\d+)\s+(.*)$/))
|
||||
.filter((match): match is RegExpMatchArray => Boolean(match))
|
||||
.map(([, pid, cmd]) => ({ pid: Number.parseInt(pid, 10), cmd }))
|
||||
.filter(({ cmd }) => cmd.includes(UNRAID_API_SERVER_ENTRYPOINT))
|
||||
.map(({ pid }) => pid)
|
||||
.filter((pid) => Number.isInteger(pid));
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}

    private async terminatePids(pids: number[]) {
        for (const pid of pids) {
            try {
                process.kill(pid, 'SIGTERM');
                this.logger.debug?.(`Sent SIGTERM to existing unraid-api process (pid ${pid}).`);
            } catch (error) {
                this.logger.debug?.(
                    `Failed to send SIGTERM to pid ${pid}: ${error instanceof Error ? error.message : error}`
                );
            }
        }
    }

    private async waitForNodemonExit(timeoutMs = 5000, pollIntervalMs = 100) {
        const deadline = Date.now() + timeoutMs;

        // Poll for any remaining nodemon processes that match our config file
        while (Date.now() < deadline) {
            const pids = await this.findMatchingNodemonPids();
            if (pids.length === 0) return;

            const runningFlags = await Promise.all(pids.map((pid) => this.isPidRunning(pid)));
            if (!runningFlags.some(Boolean)) return;

            await new Promise((resolve) => setTimeout(resolve, pollIntervalMs));
        }

        this.logger.debug?.('Timed out waiting for nodemon to exit; continuing restart anyway.');
    }

    /**
     * Wait for processes to exit, returns array of PIDs that didn't exit in time
     */
    private async waitForPidsToExit(pids: number[], timeoutMs = 5000): Promise<number[]> {
        if (timeoutMs <= 0) return pids.filter((pid) => pid > 0);

        const deadline = Date.now() + timeoutMs;
        const remaining = new Set(pids.filter((pid) => pid > 0));

        while (remaining.size > 0 && Date.now() < deadline) {
            for (const pid of remaining) {
                if (!(await this.isPidRunning(pid))) {
                    remaining.delete(pid);
                }
            }
            if (remaining.size > 0) {
                await new Promise((resolve) => setTimeout(resolve, 100));
            }
        }

        return [...remaining];
    }

    /**
     * Terminate PIDs with SIGTERM, then SIGKILL after timeout
     */
    private async terminatePidsWithForce(pids: number[], gracePeriodMs = 2000): Promise<void> {
        // Send SIGTERM to all
        for (const pid of pids) {
            try {
                process.kill(pid, 'SIGTERM');
            } catch {
                // Process may have already exited
            }
        }

        // Wait for graceful exit
        const remaining = await this.waitForPidsToExit(pids, gracePeriodMs);

        // Force kill any that didn't exit
        for (const pid of remaining) {
            try {
                process.kill(pid, 'SIGKILL');
                this.logger.debug?.(`Sent SIGKILL to pid ${pid}`);
            } catch {
                // Process may have already exited
            }
        }

        // Brief wait for SIGKILL to take effect
        if (remaining.length > 0) {
            await this.waitForPidsToExit(remaining, 1000);
        }
    }
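
    // Aside: the two-phase shutdown above (SIGTERM, bounded wait, SIGKILL for
    // stragglers) can be exercised in isolation. A hedged usage sketch; the pid
    // below is hypothetical:
    //
    //   const pids = [1234]; // hypothetical child pids
    //   await this.terminatePidsWithForce(pids, 2000);
    //   // After at most ~3s (2s grace + 1s SIGKILL settle), every pid is either
    //   // gone or stuck in the kernel; callers can re-check with isPidRunning().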

    async start(options: StartOptions = {}) {
        // Log boot attempt with diagnostic info
        await this.logToBootFile('=== Starting unraid-api via nodemon ===');
        await this.logToBootFile(`NODEMON_PATH: ${NODEMON_PATH}`);
        await this.logToBootFile(`NODEMON_CONFIG_PATH: ${NODEMON_CONFIG_PATH}`);
        await this.logToBootFile(`UNRAID_API_CWD: ${UNRAID_API_CWD}`);
        await this.logToBootFile(`NODEMON_PID_PATH: ${NODEMON_PID_PATH}`);
        await this.logToBootFile(`process.cwd(): ${process.cwd()}`);
        await this.logToBootFile(`process.execPath: ${process.execPath}`);
        await this.logToBootFile(`PATH: ${process.env.PATH}`);

        // Validate paths before proceeding
        const { valid, errors } = this.validatePaths();
        if (!valid) {
            for (const error of errors) {
                await this.logToBootFile(`ERROR: ${error}`);
                this.logger.error(error);
            }
            throw new Error(`Path validation failed: ${errors.join('; ')}`);
        }
        await this.logToBootFile('Path validation passed');

        try {
            await this.ensureNodemonDependencies();
            await this.logToBootFile('Dependencies ensured');
        } catch (error) {
            const msg = `Failed to ensure nodemon dependencies: ${error instanceof Error ? error.message : error}`;
            await this.logToBootFile(`ERROR: ${msg}`);
            this.logger.error(msg);
            throw error;
        }

        await this.withLock(() => this.startInternal(options));
    }

    private async startInternal(options: StartOptions = {}) {
        await this.stopPm2IfRunning();
        await this.logToBootFile('PM2 cleanup complete');

        const existingPid = await this.getStoredPid();
        if (existingPid) {
            const running = await this.isPidRunning(existingPid);
            if (running) {
                await this.logToBootFile(`Found running nodemon (pid ${existingPid}), restarting`);
                this.logger.info(
                    `unraid-api already running under nodemon (pid ${existingPid}); restarting for a fresh start.`
                );
                await this.stop({ quiet: true });
                await this.waitForNodemonExit();
                await rm(NODEMON_PID_PATH, { force: true });
            } else {
                await this.logToBootFile(`Found stale pid file (${existingPid}), cleaning up`);
                this.logger.warn(
                    `Found nodemon pid file (${existingPid}) but the process is not running. Cleaning up.`
                );
                await rm(NODEMON_PID_PATH, { force: true });
            }
        }

        const discoveredPids = await this.findMatchingNodemonPids();
        const liveDiscoveredPids = await Promise.all(
            discoveredPids.map(async (pid) => ((await this.isPidRunning(pid)) ? pid : null))
        ).then((pids) => pids.filter((pid): pid is number => pid !== null));
        if (liveDiscoveredPids.length > 0) {
            await this.logToBootFile(`Found orphan nodemon processes: ${liveDiscoveredPids.join(', ')}`);
            this.logger.info(
                `Found nodemon process(es) (${liveDiscoveredPids.join(', ')}) without a pid file; restarting for a fresh start.`
            );
            await this.terminatePids(liveDiscoveredPids);
            await this.waitForNodemonExit();
        }

        const directMainPids = await this.findDirectMainPids();
        if (directMainPids.length > 0) {
            await this.logToBootFile(`Found direct main.js processes: ${directMainPids.join(', ')}`);
            this.logger.warn(
                `Found existing unraid-api process(es) running directly: ${directMainPids.join(', ')}. Stopping them before starting nodemon.`
            );
            await this.terminatePids(directMainPids);
        }

        const overrides = Object.fromEntries(
            Object.entries(options.env ?? {}).filter(([, value]) => value !== undefined)
        );
        const env = {
            ...process.env,
            // Ensure PATH includes standard locations for boot-time reliability
            PATH: `/usr/local/bin:/usr/bin:/bin:${process.env.PATH || ''}`,
            NODE_ENV: 'production',
            PATHS_LOGS_FILE,
            PATHS_NODEMON_LOG_FILE,
            NODEMON_CONFIG_PATH,
            NODEMON_PID_PATH,
            UNRAID_API_CWD,
            UNRAID_API_SERVER_ENTRYPOINT,
            ...overrides,
        } as Record<string, string>;

        await this.logToBootFile(
            `Spawning: ${process.execPath} ${NODEMON_PATH} --config ${NODEMON_CONFIG_PATH}`
        );

        let logFd: number | null = null;
        try {
            // Use file descriptor for stdio - more reliable for detached processes at boot
            logFd = openSync(PATHS_NODEMON_LOG_FILE, 'a');

            // Write initial message to nodemon log
            writeSync(logFd, 'Starting nodemon...\n');

            // Use native spawn instead of execa for more reliable detached process handling
            const nodemonProcess = spawn(
                process.execPath, // Use current node executable path
                [NODEMON_PATH, '--config', NODEMON_CONFIG_PATH, '--quiet'],
                {
                    cwd: UNRAID_API_CWD,
                    env,
                    detached: true,
                    stdio: ['ignore', logFd, logFd],
                }
            );

            nodemonProcess.unref();

            if (!nodemonProcess.pid) {
                await this.logToBootFile('ERROR: Failed to spawn nodemon - no PID assigned');
                throw new Error('Failed to start nodemon: process spawned but no PID was assigned');
            }

            await writeFile(NODEMON_PID_PATH, `${nodemonProcess.pid}`);
            await this.logToBootFile(`Spawned nodemon with PID: ${nodemonProcess.pid}`);

            // Multiple verification checks with increasing delays for boot-time reliability
            const verificationDelays = [200, 500, 1000];
            for (const delay of verificationDelays) {
                await new Promise((resolve) => setTimeout(resolve, delay));
                const stillRunning = await this.isPidRunning(nodemonProcess.pid);
                if (!stillRunning) {
                    const recentLogs = await this.logs(50);
                    await rm(NODEMON_PID_PATH, { force: true });
                    const logMessage = recentLogs ? ` Recent logs:\n${recentLogs}` : '';
                    await this.logToBootFile(`ERROR: Nodemon exited after ${delay}ms`);
                    await this.logToBootFile(`Recent logs: ${recentLogs}`);
                    throw new Error(`Nodemon exited immediately after start.${logMessage}`);
                }
                await this.logToBootFile(`Verification passed after ${delay}ms`);
            }

            await this.logToBootFile(`Successfully started nodemon (pid ${nodemonProcess.pid})`);
            this.logger.info(`Started nodemon (pid ${nodemonProcess.pid})`);
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            await this.logToBootFile(`ERROR: ${errorMessage}`);
            throw new Error(`Failed to start nodemon: ${errorMessage}`);
        }
    }
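
    // Aside: the spawn above hinges on two details. First, stdio is wired to a raw
    // file descriptor (openSync) rather than a pipe, so the detached child keeps a
    // valid log sink after this CLI process exits. Second, detached + unref() puts
    // nodemon in its own process group and releases the parent's event loop. A
    // minimal standalone sketch (the paths are placeholders, not the real constants):
    //
    //   import { spawn } from 'node:child_process';
    //   import { openSync } from 'node:fs';
    //
    //   const fd = openSync('/tmp/worker.log', 'a');
    //   const child = spawn(process.execPath, ['/tmp/worker.js'], {
    //       detached: true, // new process group; survives the parent
    //       stdio: ['ignore', fd, fd], // stdout + stderr appended to the file
    //   });
    //   child.unref(); // parent may exit without waiting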

    async stop(options: StopOptions = {}) {
        const nodemonPid = await this.getStoredPid();

        // Find child processes BEFORE sending any signals
        const childPids = await this.findDirectMainPids();

        if (!nodemonPid) {
            if (!options.quiet) {
                this.logger.warn('No nodemon pid file found.');
            }
            // Clean up orphaned children if any exist
            if (childPids.length > 0) {
                this.logger.warn(
                    `Found orphaned main.js processes: ${childPids.join(', ')}. Terminating.`
                );
                await this.terminatePidsWithForce(childPids);
            }
            return;
        }

        // Step 1: SIGTERM to nodemon (will forward to child)
        try {
            process.kill(nodemonPid, 'SIGTERM');
            this.logger.trace(`Sent SIGTERM to nodemon (pid ${nodemonPid})`);
        } catch (error) {
            // Process may have already exited
            this.logger.debug?.(`nodemon (pid ${nodemonPid}) already gone: ${error}`);
        }

        // Step 2: Wait for both nodemon and children to exit
        const allPids = [nodemonPid, ...childPids];
        const gracefulTimeout = options.force ? 0 : 5000;
        const remainingPids = await this.waitForPidsToExit(allPids, gracefulTimeout);

        // Step 3: Force kill any remaining processes
        if (remainingPids.length > 0) {
            this.logger.warn(`Force killing remaining processes: ${remainingPids.join(', ')}`);
            await this.terminatePidsWithForce(remainingPids);
        }

        // Step 4: Clean up PID file
        await rm(NODEMON_PID_PATH, { force: true });
    }

    async restart(options: StartOptions = {}) {
        // Delegate to start so both commands share identical logic
        await this.start(options);
    }

    async status(): Promise<boolean> {
        const pid = await this.getStoredPid();

        // Check for orphaned processes even without PID file
        const orphanNodemonPids = await this.findMatchingNodemonPids();
        const orphanMainPids = await this.findDirectMainPids();

        if (!pid) {
            if (orphanNodemonPids.length > 0 || orphanMainPids.length > 0) {
                this.logger.warn(
                    `No PID file, but found orphaned processes: nodemon=${orphanNodemonPids.join(',') || 'none'}, main.js=${orphanMainPids.join(',') || 'none'}`
                );
                return true; // Processes ARE running, just not tracked
            }
            this.logger.info('unraid-api is not running (no pid file).');
            return false;
        }

        const running = await this.isPidRunning(pid);
        if (running) {
            this.logger.info(`unraid-api is running under nodemon (pid ${pid}).`);
        } else {
            this.logger.warn(`Found nodemon pid file (${pid}) but the process is not running.`);
            await rm(NODEMON_PID_PATH, { force: true });
        }
        return running;
    }

    async logs(lines = 100): Promise<string> {
        try {
            const { stdout } = await execa('tail', ['-n', `${lines}`, PATHS_LOGS_FILE]);
            this.logger.log(stdout);
            return stdout;
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            const isFileNotFound =
                errorMessage.includes('ENOENT') ||
                (error instanceof Error && 'code' in error && error.code === 'ENOENT');

            if (isFileNotFound) {
                this.logger.error(`Log file not found: ${PATHS_LOGS_FILE} (${errorMessage})`);
            } else {
                this.logger.error(`Failed to read logs from ${PATHS_LOGS_FILE}: ${errorMessage}`);
            }
            return '';
        }
    }
}
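
Taken as a whole, the service gives the CLI an idempotent lifecycle: start() always converges on exactly one nodemon-supervised instance, regardless of what was running before (pm2 leftovers, orphaned nodemons, or direct main.js processes). A hedged sketch of how a caller might drive it; the direct construction below is illustrative only, since the real instance comes from Nest's DI container, and the single-logger constructor shape is an assumption:

import { LogService } from '@app/unraid-api/cli/log.service.js';
import { NodemonService } from '@app/unraid-api/cli/nodemon.service.js';

declare const logger: LogService; // assumed; normally provided by Nest DI

const nodemon = new NodemonService(logger); // assumed constructor shape

await nodemon.start({ env: { LOG_LEVEL: 'DEBUG' } }); // cleans up, spawns, verifies
if (await nodemon.status()) {
    await nodemon.logs(20); // tail and print the shared log file
}
await nodemon.stop({ force: false }); // SIGTERM, 5s grace, then SIGKILL escalation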

@@ -74,13 +74,15 @@ export class InstallPluginCommand extends CommandRunner {

interface RemovePluginCommandOptions {
    plugins?: string[];
-    restart: boolean;
+    restart?: boolean;
+    bypassNpm?: boolean;
}

@SubCommand({
    name: 'remove',
    aliases: ['rm'],
    description: 'Remove plugin peer dependencies.',
    arguments: '[plugins...]',
})
export class RemovePluginCommand extends CommandRunner {
    constructor(
@@ -93,9 +95,83 @@ export class RemovePluginCommand extends CommandRunner {
        super();
    }

-    async run(_passedParams: string[], options?: RemovePluginCommandOptions): Promise<void> {
+    async run(passedParams: string[], options?: RemovePluginCommandOptions): Promise<void> {
+        const cliBypass = options?.bypassNpm;
+        const cliRestart = options?.restart;
+        const mergedOptions: RemovePluginCommandOptions = {
+            bypassNpm: cliBypass ?? false,
+            restart: cliRestart ?? true,
+            plugins: passedParams.length > 0 ? passedParams : options?.plugins,
+        };
+
+        let resolvedOptions = mergedOptions;
+        if (!mergedOptions.plugins?.length) {
+            const promptOptions = await this.promptForPlugins(mergedOptions);
+            if (!promptOptions) {
+                return;
+            }
+            resolvedOptions = {
+                // precedence: cli > prompt > default (fallback)
+                bypassNpm: cliBypass ?? promptOptions.bypassNpm ?? mergedOptions.bypassNpm,
+                restart: cliRestart ?? promptOptions.restart ?? mergedOptions.restart,
+                // precedence: prompt > default (fallback)
+                plugins: promptOptions.plugins ?? mergedOptions.plugins,
+            };
+        }
+
+        if (!resolvedOptions.plugins?.length) {
+            this.logService.warn('No plugins selected for removal.');
+            return;
+        }
+
+        if (resolvedOptions.bypassNpm) {
+            await this.pluginManagementService.removePluginConfigOnly(...resolvedOptions.plugins);
+        } else {
+            await this.pluginManagementService.removePlugin(...resolvedOptions.plugins);
+        }
+        for (const plugin of resolvedOptions.plugins) {
+            this.logService.log(`Removed plugin ${plugin}`);
+        }
+        await this.apiConfigPersistence.persist();
+
+        if (resolvedOptions.restart) {
+            await this.restartCommand.run();
+        }
+    }
+
+    @Option({
+        flags: '--no-restart',
+        description: 'do NOT restart the service after deploy',
+        defaultValue: true,
+    })
+    parseRestart(): boolean {
+        return false;
+    }
+
+    @Option({
+        flags: '-b, --bypass-npm',
+        description: 'Bypass npm uninstall and only update the config',
+        defaultValue: false,
+        name: 'bypassNpm',
+    })
+    parseBypass(): boolean {
+        return true;
+    }
+
+    @Option({
+        flags: '--npm',
+        description: 'Run npm uninstall for unbundled plugins (default behavior)',
+        name: 'bypassNpm',
+    })
+    parseRunNpm(): boolean {
+        return false;
+    }
+
+    private async promptForPlugins(
+        initialOptions: RemovePluginCommandOptions
+    ): Promise<RemovePluginCommandOptions | undefined> {
        try {
-            options = await this.inquirerService.prompt(RemovePluginQuestionSet.name, options);
+            return await this.inquirerService.prompt(RemovePluginQuestionSet.name, initialOptions);
        } catch (error) {
            if (error instanceof NoPluginsFoundError) {
                this.logService.error(error.message);
@@ -108,30 +184,6 @@ export class RemovePluginCommand extends CommandRunner {
                process.exit(1);
            }
        }

-        if (!options.plugins || options.plugins.length === 0) {
-            this.logService.warn('No plugins selected for removal.');
-            return;
-        }
-
-        await this.pluginManagementService.removePlugin(...options.plugins);
-        for (const plugin of options.plugins) {
-            this.logService.log(`Removed plugin ${plugin}`);
-        }
-        await this.apiConfigPersistence.persist();
-
-        if (options.restart) {
-            await this.restartCommand.run();
-        }
-    }
-
-    @Option({
-        flags: '--no-restart',
-        description: 'do NOT restart the service after deploy',
-        defaultValue: true,
-    })
-    parseRestart(): boolean {
-        return false;
-    }
}
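
One detail worth isolating from the new run(): the precedence chain is expressed entirely with nullish coalescing, so an explicit CLI flag beats a prompt answer, which beats the built-in default, and an explicit false at an earlier layer still wins over later layers. A tiny sketch with invented values:

const cliRestart: boolean | undefined = undefined; // no flag passed
const promptRestart: boolean | undefined = false; // user answered "no"
const defaultRestart = true;

// First defined value wins; ?? skips only null/undefined, never false.
const restart = cliRestart ?? promptRestart ?? defaultRestart;
console.log(restart); // false - the prompt answer survives the chain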

@@ -1,76 +0,0 @@
import * as fs from 'node:fs/promises';

import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { LogService } from '@app/unraid-api/cli/log.service.js';
import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';

vi.mock('node:fs/promises');
vi.mock('execa');
vi.mock('@app/core/utils/files/file-exists.js', () => ({
    fileExists: vi.fn().mockResolvedValue(false),
}));
vi.mock('@app/environment.js', () => ({
    PATHS_LOGS_DIR: '/var/log/unraid-api',
    PM2_HOME: '/var/log/.pm2',
    PM2_PATH: '/path/to/pm2',
    ECOSYSTEM_PATH: '/path/to/ecosystem.config.json',
    SUPPRESS_LOGS: false,
    LOG_LEVEL: 'info',
}));

describe('PM2Service', () => {
    let pm2Service: PM2Service;
    let logService: LogService;
    const mockMkdir = vi.mocked(fs.mkdir);

    beforeEach(() => {
        vi.clearAllMocks();
        logService = {
            trace: vi.fn(),
            warn: vi.fn(),
            error: vi.fn(),
            log: vi.fn(),
            info: vi.fn(),
            debug: vi.fn(),
        } as unknown as LogService;
        pm2Service = new PM2Service(logService);
    });

    afterEach(() => {
        vi.restoreAllMocks();
    });

    describe('ensurePm2Dependencies', () => {
        it('should create logs directory and log that PM2 will handle its own directory', async () => {
            mockMkdir.mockResolvedValue(undefined);

            await pm2Service.ensurePm2Dependencies();

            expect(mockMkdir).toHaveBeenCalledWith('/var/log/unraid-api', { recursive: true });
            expect(mockMkdir).toHaveBeenCalledTimes(1); // Only logs directory, not PM2_HOME
            expect(logService.trace).toHaveBeenCalledWith(
                'PM2_HOME will be created at /var/log/.pm2 when PM2 daemon starts'
            );
        });

        it('should log error but not throw when logs directory creation fails', async () => {
            mockMkdir.mockRejectedValue(new Error('Disk full'));

            await expect(pm2Service.ensurePm2Dependencies()).resolves.not.toThrow();

            expect(logService.error).toHaveBeenCalledWith(
                expect.stringContaining('Failed to fully ensure PM2 dependencies: Disk full')
            );
        });

        it('should handle mkdir with recursive flag for nested logs path', async () => {
            mockMkdir.mockResolvedValue(undefined);

            await pm2Service.ensurePm2Dependencies();

            expect(mockMkdir).toHaveBeenCalledWith('/var/log/unraid-api', { recursive: true });
            expect(mockMkdir).toHaveBeenCalledTimes(1);
        });
    });
});

@@ -1,133 +0,0 @@
import { Injectable } from '@nestjs/common';
import { mkdir, rm } from 'node:fs/promises';
import { join } from 'node:path';

import type { Options, Result, ResultPromise } from 'execa';
import { execa, ExecaError } from 'execa';

import { fileExists } from '@app/core/utils/files/file-exists.js';
import { PATHS_LOGS_DIR, PM2_HOME, PM2_PATH } from '@app/environment.js';
import { LogService } from '@app/unraid-api/cli/log.service.js';

type CmdContext = Options & {
    /** A tag for logging & debugging purposes. Should represent the operation being performed. */
    tag: string;
    /** Default: false.
     *
     * When true, results will not be automatically handled and logged.
     * The caller must handle desired effects, such as logging, error handling, etc.
     */
    raw?: boolean;
};

@Injectable()
export class PM2Service {
    constructor(private readonly logger: LogService) {}

    // Type Overload: if raw is true, return an execa ResultPromise (which is a Promise with extra properties)
    /**
     * Executes a PM2 command with the specified context and arguments.
     * Handles logging automatically (stdout -> trace, stderr -> error), unless the `raw` flag is
     * set to true, in which case the caller must handle desired effects.
     *
     * @param context - Execa Options for command execution, such as a unique tag for logging
     * and whether the result should be handled raw.
     * @param args - The arguments to pass to the PM2 command.
     * @returns ResultPromise\<@param context\> When raw is true
     * @returns Promise\<Result\> When raw is false
     */
    run<T extends CmdContext>(context: T & { raw: true }, ...args: string[]): ResultPromise<T>;

    run(context: CmdContext & { raw?: false }, ...args: string[]): Promise<Result>;

    async run(context: CmdContext, ...args: string[]) {
        const { tag, raw, ...execOptions } = context;
        // Default to true to match execa's default behavior
        execOptions.extendEnv ??= true;
        execOptions.shell ??= 'bash';

        // Ensure /usr/local/bin is in PATH for Node.js
        const currentPath = execOptions.env?.PATH || process.env.PATH || '/usr/bin:/bin:/usr/sbin:/sbin';
        const needsPathUpdate = !currentPath.includes('/usr/local/bin');
        const finalPath = needsPathUpdate ? `/usr/local/bin:${currentPath}` : currentPath;

        // Always ensure PM2_HOME is set in the environment for every PM2 command
        execOptions.env = {
            ...execOptions.env,
            PM2_HOME,
            ...(needsPathUpdate && { PATH: finalPath }),
        };

        const runCommand = () => execa(PM2_PATH, [...args], execOptions satisfies Options);
        if (raw) {
            return runCommand();
        }
        return runCommand()
            .then((result) => {
                this.logger.trace(result.stdout);
                return result;
            })
            .catch((result: Result) => {
                this.logger.error(`PM2 error occurred from tag "${tag}": ${result.stdout}\n`);
                return result;
            });
    }

    /**
     * Deletes the PM2 dump file.
     *
     * This method removes the PM2 dump file located at `~/.pm2/dump.pm2` by default.
     * It logs a message indicating that the PM2 dump has been cleared.
     *
     * @returns A promise that resolves once the dump file is removed.
     */
    async deleteDump(dumpFile = join(PM2_HOME, 'dump.pm2')) {
        await rm(dumpFile, { force: true });
        this.logger.trace('PM2 dump cleared.');
    }

    async forceKillPm2Daemon() {
        try {
            // Find all PM2 daemon processes and kill them
            const pids = (await execa('pgrep', ['-i', 'PM2'])).stdout.split('\n').filter(Boolean);
            if (pids.length > 0) {
                await execa('kill', ['-9', ...pids]);
                this.logger.trace(`Killed PM2 daemon processes: ${pids.join(', ')}`);
            }
        } catch (err) {
            if (err instanceof ExecaError && err.exitCode === 1) {
                this.logger.trace('No PM2 daemon processes found.');
            } else {
                this.logger.error(`Error force killing PM2 daemon: ${err}`);
            }
        }
    }

    async deletePm2Home() {
        if ((await fileExists(PM2_HOME)) && (await fileExists(join(PM2_HOME, 'pm2.log')))) {
            await rm(PM2_HOME, { recursive: true, force: true });
            this.logger.trace('PM2 home directory cleared.');
        } else {
            this.logger.trace('PM2 home directory does not exist.');
        }
    }

    /**
     * Ensures that the dependencies necessary for PM2 to start and operate are present.
     * Creates PM2_HOME directory with proper permissions if it doesn't exist.
     */
    async ensurePm2Dependencies() {
        try {
            // Create logs directory
            await mkdir(PATHS_LOGS_DIR, { recursive: true });

            // PM2 automatically creates and manages its home directory when the daemon starts
            this.logger.trace(`PM2_HOME will be created at ${PM2_HOME} when PM2 daemon starts`);
        } catch (error) {
            // Log error but don't throw - let PM2 fail with its own error messages if the setup is incomplete
            this.logger.error(
                `Failed to fully ensure PM2 dependencies: ${error instanceof Error ? error.message : error}. PM2 may encounter issues during operation.`
            );
        }
    }
}
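
Before moving on, note the typing trick this deleted service relied on: run() is overloaded on the literal type of the raw flag, so raw: true callers are typed to receive execa's rich ResultPromise while everyone else gets a plain settled Result. A reduced, library-free sketch of the same dispatch-on-flag overloading (all names here are invented for illustration):

type Rich = { stdout: string; pipe: (next: string) => void };
type Plain = { stdout: string };

function runCmd(opts: { raw: true }): Rich;
function runCmd(opts?: { raw?: false }): Plain;
function runCmd(opts?: { raw?: boolean }): Rich | Plain {
    const base = { stdout: 'ok' };
    // Callers who pass raw: true are typed to receive the richer shape.
    return opts?.raw ? { ...base, pipe: () => undefined } : base;
}

const rich = runCmd({ raw: true }); // typed as Rich
rich.pipe('wc -l'); // compiles: pipe exists on Rich
const plain = runCmd(); // typed as Plain; plain.pipe would be a type error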

@@ -33,9 +33,9 @@ export class ReportCommand extends CommandRunner {
    async report(): Promise<string | void> {
        try {
            // Check if API is running
-            const { isUnraidApiRunning } = await import('@app/core/utils/pm2/unraid-api-running.js');
+            const { isUnraidApiRunning } = await import('@app/core/utils/process/unraid-api-running.js');
            const apiRunning = await isUnraidApiRunning().catch((err) => {
-                this.logger.debug('failed to get PM2 state with error: ' + err);
+                this.logger.debug('failed to check nodemon state with error: ' + err);
                return false;
            });
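
The shape of this hunk is worth a note: the liveness check is loaded via dynamic import() and every failure, whether loading the module or running the probe, is folded into a safe false so the report never crashes on it. A minimal sketch of the same degrade-to-default pattern; the module path and checker name are placeholders:

async function isServiceRunning(): Promise<boolean> {
    // Hypothetical module; stands in for the unraid-api-running util.
    const { check } = await import('./liveness.js');
    return check();
}

// A missing module, bad export, or thrown probe all degrade to "not running".
const running = await isServiceRunning().catch(() => false);
console.log(running ? 'service up' : 'service down or unknown');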

@@ -2,9 +2,9 @@ import { Command, CommandRunner, Option } from 'nest-commander';

import type { LogLevel } from '@app/core/log.js';
import { levels } from '@app/core/log.js';
-import { ECOSYSTEM_PATH, LOG_LEVEL } from '@app/environment.js';
+import { LOG_LEVEL } from '@app/environment.js';
import { LogService } from '@app/unraid-api/cli/log.service.js';
-import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';
+import { NodemonService } from '@app/unraid-api/cli/nodemon.service.js';

export interface LogLevelOptions {
    logLevel?: LogLevel;
@@ -22,7 +22,7 @@ export function parseLogLevelOption(val: string, allowedLevels: string[] = [...l
export class RestartCommand extends CommandRunner {
    constructor(
        private readonly logger: LogService,
-        private readonly pm2: PM2Service
+        private readonly nodemon: NodemonService
    ) {
        super();
    }
@@ -30,23 +30,9 @@ export class RestartCommand extends CommandRunner {
    async run(_?: string[], options: LogLevelOptions = {}): Promise<void> {
        try {
            this.logger.info('Restarting the Unraid API...');
-            const env = { LOG_LEVEL: options.logLevel };
-            const { stderr, stdout } = await this.pm2.run(
-                { tag: 'PM2 Restart', raw: true, extendEnv: true, env },
-                'restart',
-                ECOSYSTEM_PATH,
-                '--update-env',
-                '--mini-list'
-            );
-
-            if (stderr) {
-                this.logger.error(stderr.toString());
-                process.exit(1);
-            } else if (stdout) {
-                this.logger.info(stdout.toString());
-            } else {
-                this.logger.info('Unraid API restarted');
-            }
+            const env = { LOG_LEVEL: options.logLevel?.toUpperCase() };
+            await this.nodemon.restart({ env });
+            this.logger.info('Unraid API restarted');
        } catch (error) {
            if (error instanceof Error) {
                this.logger.error(error.message);

@@ -3,46 +3,23 @@ import { Command, CommandRunner, Option } from 'nest-commander';
import type { LogLevel } from '@app/core/log.js';
import type { LogLevelOptions } from '@app/unraid-api/cli/restart.command.js';
import { levels } from '@app/core/log.js';
-import { ECOSYSTEM_PATH, LOG_LEVEL } from '@app/environment.js';
+import { LOG_LEVEL } from '@app/environment.js';
import { LogService } from '@app/unraid-api/cli/log.service.js';
-import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';
+import { NodemonService } from '@app/unraid-api/cli/nodemon.service.js';
import { parseLogLevelOption } from '@app/unraid-api/cli/restart.command.js';

@Command({ name: 'start', description: 'Start the Unraid API' })
export class StartCommand extends CommandRunner {
    constructor(
        private readonly logger: LogService,
-        private readonly pm2: PM2Service
+        private readonly nodemon: NodemonService
    ) {
        super();
    }

-    async cleanupPM2State() {
-        await this.pm2.ensurePm2Dependencies();
-        await this.pm2.run({ tag: 'PM2 Stop' }, 'stop', ECOSYSTEM_PATH);
-        await this.pm2.run({ tag: 'PM2 Update' }, 'update');
-        await this.pm2.deleteDump();
-        await this.pm2.run({ tag: 'PM2 Delete' }, 'delete', ECOSYSTEM_PATH);
-    }
-
    async run(_: string[], options: LogLevelOptions): Promise<void> {
        this.logger.info('Starting the Unraid API');
-        await this.cleanupPM2State();
-        const env = { LOG_LEVEL: options.logLevel };
-        const { stderr, stdout } = await this.pm2.run(
-            { tag: 'PM2 Start', raw: true, extendEnv: true, env },
-            'start',
-            ECOSYSTEM_PATH,
-            '--update-env',
-            '--mini-list'
-        );
-        if (stdout) {
-            this.logger.log(stdout.toString());
-        }
-        if (stderr) {
-            this.logger.error(stderr.toString());
-            process.exit(1);
-        }
+        await this.nodemon.start({ env: { LOG_LEVEL: options.logLevel?.toUpperCase() } });
    }

    @Option({

Some files were not shown because too many files have changed in this diff.