Compare commits

26 Commits

Author SHA1 Message Date
Eli Bosley
81d8d3ef62 commit changes 2025-09-03 15:50:01 -04:00
Michael Datelle
5d4a16fe8f feat: build docker card layout (#1572)
<!-- This is an auto-generated comment: release notes by coderabbit.ai -->

## Summary by CodeRabbit

* **New Features**
  * Introduced a comprehensive Docker management interface with detail and card views, including new components for container overview, logs, console, editing, and web preview.
  * Added a new layout and navigation system for detailed item views with tabbed content and groupable card layouts.
  * Enabled dynamic loading and registration of Unraid UI web components, including a new `<unraid-detail-test />` web component.
  * Added new page and layout components for enhanced UI flexibility.

* **Enhancements**
  * Updated environment variable handling and documentation, including production license key support.
  * Switched to "@nuxt/ui-pro" for advanced UI features and updated related configuration.
  * Improved theme initialization and UI configuration injection.

* **Chores**
  * Added development dependencies and updated ignore rules for environment files.
  * Adjusted ESLint configuration for component definition checks.

* **Style**
  * Minor improvements to import statements and style tag formatting.

* **Documentation**
  * Updated example environment variable files and comments for clarity.

<!-- end of auto-generated comment: release notes by coderabbit.ai -->

---------

Co-authored-by: mdatelle <mike@datelle.net>
2025-09-03 15:48:50 -04:00
mdatelle
8a3c1b3ba8 fix: include nuxt ui configuration files in web-components-plugins 2025-09-03 15:48:50 -04:00
mdatelle
59b257a50f refactor: update dropdown menus 2025-09-03 15:48:38 -04:00
mdatelle
4f8fd18a39 refactor: use Drawer component for mobile view navigation 2025-09-03 15:48:38 -04:00
mdatelle
95eb841110 feat: add responsive styles 2025-09-03 15:48:38 -04:00
mdatelle
d06b0db923 refactor: consolidate interfaces, fix typescript error, and include styles in unraid-next layout 2025-09-03 15:48:38 -04:00
mdatelle
1c4dc154e8 refactor: remove old Detail component and update test component 2025-09-03 15:48:38 -04:00
mdatelle
1f67f63513 refactor: use more generic naming with item prop 2025-09-03 15:48:36 -04:00
mdatelle
761b3964a9 fix: fix typescript error 2025-09-03 15:47:10 -04:00
mdatelle
b9a4d4a864 refactor: break up Detail component into seperate components 2025-09-03 15:47:10 -04:00
mdatelle
c82e3d8427 feat: set up base layout and content test 2025-09-03 15:47:07 -04:00
Eli Bosley
3211312b0e chore: update dependencies and clean up unused imports in components 2025-09-03 15:44:47 -04:00
mdatelle
fb575acc4f chore: register DetailTest in config 2025-09-03 15:44:43 -04:00
mdatelle
4a0b481a2d test: create proper test setup for Detail layout 2025-09-03 15:44:16 -04:00
mdatelle
cd15e12cdd refactor: update main.yml to include .env.production 2025-09-03 15:44:16 -04:00
mdatelle
0e20fd0ab0 update main.yml and remove cat command 2025-09-03 15:44:16 -04:00
mdatelle
f44b4a87e9 test: create detail page and web component to test in webgui 2025-09-03 15:44:16 -04:00
mdatelle
4986f4251d feat: add navigation header controls, custom group dropdown, and spacing adjustments 2025-09-03 15:44:16 -04:00
mdatelle
8213738e26 refactor: get menu children working 2025-09-03 15:44:16 -04:00
mdatelle
f32493e728 feat: use and customize NavigationMenu and update status badges 2025-09-03 15:44:16 -04:00
mdatelle
78ce64e357 feat: create base Detail component and placeholder tab components 2025-09-03 15:44:16 -04:00
mdatelle
bb8c4a133e chore: fix typo 2025-09-03 15:44:16 -04:00
mdatelle
c3222cc6c4 chore: ignore all .env unless force tracked and align example with production 2025-09-03 15:44:16 -04:00
mdatelle
71621072f8 chore: update example andn remove env.production from tracking 2025-09-03 15:44:16 -04:00
mdatelle
d4a8edab49 feat: set up base layout and content test 2025-09-03 15:44:13 -04:00
403 changed files with 10819 additions and 18291 deletions

View File

@@ -1,3 +1,123 @@
{
"permissions": {}
"permissions": {
"allow": [
"# Development Commands",
"Bash(pnpm install)",
"Bash(pnpm dev)",
"Bash(pnpm build)",
"Bash(pnpm test)",
"Bash(pnpm test:*)",
"Bash(pnpm lint)",
"Bash(pnpm lint:fix)",
"Bash(pnpm type-check)",
"Bash(pnpm codegen)",
"Bash(pnpm storybook)",
"Bash(pnpm --filter * dev)",
"Bash(pnpm --filter * build)",
"Bash(pnpm --filter * test)",
"Bash(pnpm --filter * lint)",
"Bash(pnpm --filter * codegen)",
"# Git Commands (read-only)",
"Bash(git status)",
"Bash(git diff)",
"Bash(git log)",
"Bash(git branch)",
"Bash(git remote -v)",
"# Search Commands",
"Bash(rg *)",
"# File System (read-only)",
"Bash(ls)",
"Bash(ls -la)",
"Bash(pwd)",
"Bash(find . -name)",
"Bash(find . -type)",
"# Node/NPM Commands",
"Bash(node --version)",
"Bash(pnpm --version)",
"Bash(npx --version)",
"# Environment Commands",
"Bash(echo $*)",
"Bash(which *)",
"# Process Commands",
"Bash(ps aux | grep)",
"Bash(lsof -i)",
"# Documentation Domains",
"WebFetch(domain:tailwindcss.com)",
"WebFetch(domain:github.com)",
"WebFetch(domain:reka-ui.com)",
"WebFetch(domain:nodejs.org)",
"WebFetch(domain:pnpm.io)",
"WebFetch(domain:vitejs.dev)",
"WebFetch(domain:nuxt.com)",
"WebFetch(domain:nestjs.com)",
"# IDE Integration",
"mcp__ide__getDiagnostics",
"# Browser MCP (for testing)",
"mcp__browsermcp__browser_navigate",
"mcp__browsermcp__browser_click",
"mcp__browsermcp__browser_screenshot"
],
"deny": [
"# Dangerous Commands",
"Bash(rm -rf)",
"Bash(chmod 777)",
"Bash(curl)",
"Bash(wget)",
"Bash(ssh)",
"Bash(scp)",
"Bash(sudo)",
"Bash(su)",
"Bash(pkill)",
"Bash(kill)",
"Bash(killall)",
"Bash(python)",
"Bash(python3)",
"Bash(pip)",
"Bash(npm)",
"Bash(yarn)",
"Bash(apt)",
"Bash(brew)",
"Bash(systemctl)",
"Bash(service)",
"Bash(docker)",
"Bash(docker-compose)",
"# File Modification (use Edit/Write tools instead)",
"Bash(sed)",
"Bash(awk)",
"Bash(perl)",
"Bash(echo > *)",
"Bash(echo >> *)",
"Bash(cat > *)",
"Bash(cat >> *)",
"Bash(tee)",
"# Git Write Commands (require explicit user action)",
"Bash(git add)",
"Bash(git commit)",
"Bash(git push)",
"Bash(git pull)",
"Bash(git merge)",
"Bash(git rebase)",
"Bash(git checkout)",
"Bash(git reset)",
"Bash(git clean)",
"# Package Management Write Commands",
"Bash(pnpm add)",
"Bash(pnpm remove)",
"Bash(pnpm update)",
"Bash(pnpm upgrade)"
]
},
"enableAllProjectMcpServers": false
}

View File

@@ -36,8 +36,6 @@ on:
required: true
CF_ENDPOINT:
required: true
UNRAID_BOT_GITHUB_ADMIN_TOKEN:
required: false
jobs:
build-plugin:
name: Build and Deploy Plugin
@@ -99,7 +97,7 @@ jobs:
uses: actions/download-artifact@v5
with:
pattern: unraid-wc-rich
path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/standalone
path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/nuxt
merge-multiple: true
- name: Download Unraid API
uses: actions/download-artifact@v5
@@ -153,7 +151,7 @@ jobs:
uses: the-actions-org/workflow-dispatch@v4.0.0
with:
workflow: release-production.yml
inputs: '{ "version": "v${{ steps.vars.outputs.API_VERSION }}" }'
inputs: '{ "version": "${{ steps.vars.outputs.API_VERSION }}" }'
token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
- name: Upload to Cloudflare
@@ -183,40 +181,3 @@ jobs:
```
${{ inputs.BASE_URL }}/tag/${{ inputs.TAG }}/dynamix.unraid.net.plg
```
- name: Clean up old preview builds
if: inputs.RELEASE_CREATED == 'false' && github.event_name == 'push'
continue-on-error: true
env:
AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: auto
run: |
echo "🧹 Cleaning up old preview builds (keeping last 7 days)..."
# Calculate cutoff date (7 days ago)
CUTOFF_DATE=$(date -d "7 days ago" +"%Y.%m.%d")
echo "Deleting builds older than: ${CUTOFF_DATE}"
# List and delete old timestamped .txz files
OLD_FILES=$(aws s3 ls "s3://${{ secrets.CF_BUCKET_PREVIEW }}/unraid-api/" \
--endpoint-url ${{ secrets.CF_ENDPOINT }} --recursive | \
grep -E "dynamix\.unraid\.net-[0-9]{4}\.[0-9]{2}\.[0-9]{2}\.[0-9]{4}\.txz" | \
awk '{print $4}' || true)
DELETED_COUNT=0
if [ -n "$OLD_FILES" ]; then
while IFS= read -r file; do
if [[ $file =~ ([0-9]{4}\.[0-9]{2}\.[0-9]{2})\.[0-9]{4}\.txz ]]; then
FILE_DATE="${BASH_REMATCH[1]}"
if [[ "$FILE_DATE" < "$CUTOFF_DATE" ]]; then
echo "Deleting old build: $(basename "$file")"
aws s3 rm "s3://${{ secrets.CF_BUCKET_PREVIEW }}/${file}" \
--endpoint-url ${{ secrets.CF_ENDPOINT }} || true
((DELETED_COUNT++))
fi
fi
done <<< "$OLD_FILES"
fi
echo "✅ Deleted ${DELETED_COUNT} old builds"

View File

@@ -65,7 +65,7 @@ jobs:
- name: Comment PR with deployment URL
if: github.event_name == 'pull_request'
uses: actions/github-script@v8
uses: actions/github-script@v7
with:
script: |
github.rest.issues.createComment({

View File

@@ -8,9 +8,27 @@ on:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
cancel-in-progress: true
jobs:
release-please:
name: Release Please
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout
uses: actions/checkout@v5
# Only run release-please on pushes to main
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
- id: release
uses: googleapis/release-please-action@v4
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
outputs:
releases_created: ${{ steps.release.outputs.releases_created || 'false' }}
tag_name: ${{ steps.release.outputs.tag_name || '' }}
test-api:
name: Test API
defaults:
@@ -316,6 +334,9 @@ jobs:
echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env
touch .env.production
echo NUXT_UI_PRO_LICENSE=${{ secrets.NUXT_UI_PRO_LICENSE }} >> .env.production
- name: Install Node
uses: actions/setup-node@v4
with:
@@ -366,34 +387,12 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: unraid-wc-rich
path: web/dist
release-please:
name: Release Please
runs-on: ubuntu-latest
# Only run on pushes to main AND after tests pass
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
needs:
- test-api
- build-api
- build-web
- build-unraid-ui-webcomponents
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout
uses: actions/checkout@v5
- id: release
uses: googleapis/release-please-action@v4
outputs:
releases_created: ${{ steps.release.outputs.releases_created || 'false' }}
tag_name: ${{ steps.release.outputs.tag_name || '' }}
path: web/.nuxt/standalone-apps
build-plugin-staging-pr:
name: Build and Deploy Plugin
needs:
- release-please
- build-api
- build-web
- build-unraid-ui-webcomponents
@@ -417,6 +416,9 @@ jobs:
needs:
- release-please
- build-api
- build-web
- build-unraid-ui-webcomponents
- test-api
uses: ./.github/workflows/build-plugin.yml
with:
RELEASE_CREATED: true
@@ -430,4 +432,3 @@ jobs:
CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
CF_BUCKET_PREVIEW: ${{ secrets.CF_BUCKET_PREVIEW }}
CF_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

View File

@@ -1,9 +1,4 @@
name: Replace PR Plugin with Staging Redirect on Merge
# This workflow runs when a PR is merged and replaces the PR-specific plugin
# with a redirect version that points to the main staging URL.
# This ensures users who installed the PR version will automatically
# update to the staging version on their next update check.
name: Push Staging Plugin on PR Close
on:
pull_request:
@@ -22,7 +17,7 @@ on:
default: true
jobs:
push-staging-redirect:
push-staging:
if: (github.event_name == 'pull_request' && github.event.pull_request.merged == true) || (github.event_name == 'workflow_dispatch' && inputs.pr_merged == true)
runs-on: ubuntu-latest
permissions:
@@ -50,12 +45,11 @@ jobs:
name: unraid-plugin-.*
path: connect-files
pr: ${{ steps.pr_number.outputs.pr_number }}
workflow: main.yml
workflow_conclusion: success
workflow_search: true
search_artifacts: true
if_no_artifact_found: fail
- name: Update Downloaded Plugin to Redirect to Staging
- name: Update Downloaded Staging Plugin to New Date
run: |
# Find the .plg file in the downloaded artifact
plgfile=$(find connect-files -name "*.plg" -type f | head -1)
@@ -66,82 +60,23 @@ jobs:
fi
echo "Found plugin file: $plgfile"
# Get current version and bump it with current timestamp
current_version=$(grep '<!ENTITY version' "${plgfile}" | sed -E 's/.*"(.*)".*/\1/')
echo "Current version: ${current_version}"
# Create new version with current timestamp (ensures it's newer)
new_version=$(date +"%Y.%m.%d.%H%M")
echo "New redirect version: ${new_version}"
# Update version to trigger update
sed -i -E "s#(<!ENTITY version \").*(\">)#\1${new_version}\2#g" "${plgfile}" || exit 1
version=$(date +"%Y.%m.%d.%H%M")
sed -i -E "s#(<!ENTITY version \").*(\">)#\1${version}\2#g" "${plgfile}" || exit 1
# Change the plugin url to point to staging - users will switch to staging on next update
# Change the plugin url to point to staging
url="https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg"
sed -i -E "s#(<!ENTITY plugin_url \").*?(\">)#\1${url}\2#g" "${plgfile}" || exit 1
echo "Modified plugin to redirect to: ${url}"
echo "Version bumped from ${current_version} to ${new_version}"
cat "${plgfile}"
mkdir -p pr-release
mv "${plgfile}" pr-release/dynamix.unraid.net.plg
- name: Clean up old PR artifacts from Cloudflare
- name: Upload to Cloudflare
uses: jakejarvis/s3-sync-action@v0.5.1
env:
AWS_S3_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
AWS_S3_BUCKET: ${{ secrets.CF_BUCKET_PREVIEW }}
AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: auto
run: |
# Delete all existing files in the PR directory first (txz, plg, etc.)
aws s3 rm s3://${{ secrets.CF_BUCKET_PREVIEW }}/unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}/ \
--recursive \
--endpoint-url ${{ secrets.CF_ENDPOINT }}
echo "✅ Cleaned up old PR artifacts"
- name: Upload PR Redirect Plugin to Cloudflare
env:
AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: auto
run: |
# Upload only the redirect plugin file
aws s3 cp pr-release/dynamix.unraid.net.plg \
s3://${{ secrets.CF_BUCKET_PREVIEW }}/unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}/dynamix.unraid.net.plg \
--endpoint-url ${{ secrets.CF_ENDPOINT }} \
--content-encoding none \
--acl public-read
echo "✅ Uploaded redirect plugin"
- name: Output redirect information
run: |
echo "✅ PR plugin replaced with staging redirect version"
echo "PR URL remains: https://preview.dl.unraid.net/unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}/dynamix.unraid.net.plg"
echo "Redirects users to staging: https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg"
echo "Users updating from this PR version will automatically switch to staging"
- name: Comment on PR about staging redirect
if: github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v3
with:
comment-tag: pr-closed-staging
mode: recreate
message: |
## 🔄 PR Merged - Plugin Redirected to Staging
This PR has been merged and the preview plugin has been updated to redirect to the staging version.
**For users testing this PR:**
- Your plugin will automatically update to the staging version on the next update check
- The staging version includes all merged changes from this PR
- No manual intervention required
**Staging URL:**
```
https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg
```
Thank you for testing! 🚀
AWS_REGION: "auto"
SOURCE_DIR: pr-release
DEST_DIR: unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}

View File

@@ -37,7 +37,7 @@ jobs:
EOF
- run: npm install html-escaper@2 xml2js
- name: Update Plugin Changelog
uses: actions/github-script@v8
uses: actions/github-script@v7
with:
script: |
const fs = require('fs');
@@ -124,16 +124,3 @@ jobs:
--no-guess-mime-type \
--content-encoding none \
--acl public-read
- name: Actions for Discord
uses: Ilshidur/action-discord@0.4.0
env:
DISCORD_WEBHOOK: ${{ secrets.PUBLIC_DISCORD_RELEASE_ENDPOINT }}
with:
args: |
🚀 **Unraid API Release ${{ inputs.version }}**
View Release: https://github.com/${{ github.repository }}/releases/tag/${{ inputs.version }}
**Changelog:**
${{ steps.release-info.outputs.body }}

.gitignore (vendored), 6 lines changed
View File

@@ -29,10 +29,6 @@ unraid-ui/node_modules/
# TypeScript v1 declaration files
typings/
# Auto-generated type declarations for Nuxt UI
auto-imports.d.ts
components.d.ts
# Optional npm cache directory
.npm
@@ -123,3 +119,5 @@ api/dev/Unraid.net/myservers.cfg
# local Mise settings
.mise.toml
# environment variables
web/.env.production

View File

@@ -1 +1 @@
{".":"4.21.0"}
{".":"4.18.2"}

View File

@@ -76,21 +76,4 @@ body {
button:not(:disabled),
[role='button']:not(:disabled) {
cursor: pointer;
}
/* Font size overrides for SSO button component */
unraid-sso-button {
--text-xs: 0.75rem;
--text-sm: 0.875rem;
--text-base: 1rem;
--text-lg: 1.125rem;
--text-xl: 1.25rem;
--text-2xl: 1.5rem;
--text-3xl: 1.875rem;
--text-4xl: 2.25rem;
--text-5xl: 3rem;
--text-6xl: 3.75rem;
--text-7xl: 4.5rem;
--text-8xl: 6rem;
--text-9xl: 8rem;
}

View File

@@ -2,59 +2,9 @@
/* Light mode defaults */
:root {
/* Nuxt UI Color System - Primary (Orange for Unraid) */
--ui-color-primary-50: #fff7ed;
--ui-color-primary-100: #ffedd5;
--ui-color-primary-200: #fed7aa;
--ui-color-primary-300: #fdba74;
--ui-color-primary-400: #fb923c;
--ui-color-primary-500: #ff8c2f;
--ui-color-primary-600: #ea580c;
--ui-color-primary-700: #c2410c;
--ui-color-primary-800: #9a3412;
--ui-color-primary-900: #7c2d12;
--ui-color-primary-950: #431407;
/* Nuxt UI Color System - Neutral (True Gray) */
--ui-color-neutral-50: #fafafa;
--ui-color-neutral-100: #f5f5f5;
--ui-color-neutral-200: #e5e5e5;
--ui-color-neutral-300: #d4d4d4;
--ui-color-neutral-400: #a3a3a3;
--ui-color-neutral-500: #737373;
--ui-color-neutral-600: #525252;
--ui-color-neutral-700: #404040;
--ui-color-neutral-800: #262626;
--ui-color-neutral-900: #171717;
--ui-color-neutral-950: #0a0a0a;
/* Nuxt UI Default color shades */
--ui-primary: var(--ui-color-primary-500);
--ui-secondary: var(--ui-color-neutral-500);
/* Nuxt UI Design Tokens - Text */
--ui-text-dimmed: var(--ui-color-neutral-400);
--ui-text-muted: var(--ui-color-neutral-500);
--ui-text-toned: var(--ui-color-neutral-600);
--ui-text: var(--ui-color-neutral-700);
--ui-text-highlighted: var(--ui-color-neutral-900);
--ui-text-inverted: white;
/* Nuxt UI Design Tokens - Background */
--ui-bg: white;
--ui-bg-muted: var(--ui-color-neutral-50);
--ui-bg-elevated: var(--ui-color-neutral-100);
--ui-bg-accented: var(--ui-color-neutral-200);
--ui-bg-inverted: var(--ui-color-neutral-900);
/* Nuxt UI Design Tokens - Border */
--ui-border: var(--ui-color-neutral-200);
--ui-border-muted: var(--ui-color-neutral-200);
--ui-border-accented: var(--ui-color-neutral-300);
--ui-border-inverted: var(--ui-color-neutral-900);
/* Nuxt UI Radius */
--ui-radius: 0.5rem;
/* Override Tailwind v4 global styles to use webgui variables */
--ui-bg: var(--background-color) !important;
--ui-text: var(--text-color) !important;
--background: 0 0% 100%;
--foreground: 0 0% 3.9%;
@@ -66,7 +16,7 @@
--card-foreground: 0 0% 3.9%;
--border: 0 0% 89.8%;
--input: 0 0% 89.8%;
--primary: 24 100% 50%; /* Orange #ff8c2f in HSL */
--primary: 0 0% 9%;
--primary-foreground: 0 0% 98%;
--secondary: 0 0% 96.1%;
--secondary-foreground: 0 0% 9%;
@@ -74,7 +24,7 @@
--accent-foreground: 0 0% 9%;
--destructive: 0 84.2% 60.2%;
--destructive-foreground: 0 0% 98%;
--ring: 24 100% 50%; /* Orange ring to match primary */
--ring: 0 0% 3.9%;
--chart-1: 12 76% 61%;
--chart-2: 173 58% 39%;
--chart-3: 197 37% 24%;
@@ -84,30 +34,9 @@
/* Dark mode */
.dark {
/* Nuxt UI Default color shades - Dark mode */
--ui-primary: var(--ui-color-primary-400);
--ui-secondary: var(--ui-color-neutral-400);
/* Nuxt UI Design Tokens - Text (Dark) */
--ui-text-dimmed: var(--ui-color-neutral-500);
--ui-text-muted: var(--ui-color-neutral-400);
--ui-text-toned: var(--ui-color-neutral-300);
--ui-text: var(--ui-color-neutral-200);
--ui-text-highlighted: white;
--ui-text-inverted: var(--ui-color-neutral-900);
/* Nuxt UI Design Tokens - Background (Dark) */
--ui-bg: var(--ui-color-neutral-900);
--ui-bg-muted: var(--ui-color-neutral-800);
--ui-bg-elevated: var(--ui-color-neutral-800);
--ui-bg-accented: var(--ui-color-neutral-700);
--ui-bg-inverted: white;
/* Nuxt UI Design Tokens - Border (Dark) */
--ui-border: var(--ui-color-neutral-800);
--ui-border-muted: var(--ui-color-neutral-700);
--ui-border-accented: var(--ui-color-neutral-700);
--ui-border-inverted: white;
/* Override Tailwind v4 global styles to use webgui variables */
--ui-bg: var(--background-color) !important;
--ui-text: var(--text-color) !important;
--background: 0 0% 3.9%;
--foreground: 0 0% 98%;
@@ -119,15 +48,15 @@
--card-foreground: 0 0% 98%;
--border: 0 0% 14.9%;
--input: 0 0% 14.9%;
--primary: 24 100% 50%; /* Orange #ff8c2f in HSL */
--primary-foreground: 0 0% 98%;
--primary: 0 0% 98%;
--primary-foreground: 0 0% 9%;
--secondary: 0 0% 14.9%;
--secondary-foreground: 0 0% 98%;
--accent: 0 0% 14.9%;
--accent-foreground: 0 0% 98%;
--destructive: 0 62.8% 30.6%;
--destructive-foreground: 0 0% 98%;
--ring: 24 100% 50%; /* Orange ring to match primary */
--ring: 0 0% 83.1%;
--chart-1: 220 70% 50%;
--chart-2: 160 60% 45%;
--chart-3: 30 80% 55%;

View File

@@ -1,6 +1,5 @@
/* Tailwind Shared Styles - Single entry point for all shared CSS */
@import './css-variables.css';
@import './unraid-theme.css';
@import './theme-variants.css';
@import './base-utilities.css';
@import './sonner.css';

View File

@@ -229,8 +229,6 @@
top: 0;
height: 20px;
width: 20px;
min-width: inherit !important;
margin: 0 !important;
display: flex;
justify-content: center;
align-items: center;
@@ -698,11 +696,4 @@
.sonner-loader[data-visible='false'] {
opacity: 0;
transform: scale(0.8) translate(-50%, -50%);
}
/* Override Unraid webgui docker icon styles on sonner containers */
[data-sonner-toast] [data-icon]:before,
[data-sonner-toast] .fa-docker:before {
font-family: inherit !important;
content: '' !important;
}

View File

@@ -1,97 +0,0 @@
/**
* Tailwind v4 Theme Variants
* Defines theme-specific CSS variables that can be switched via classes
* These are applied dynamically based on the theme selected in GraphQL
*/
/* Default/White Theme */
:root,
.theme-white {
--header-text-primary: #ffffff;
--header-text-secondary: #999999;
--header-background-color: #1c1b1b;
--header-gradient-start: rgba(28, 27, 27, 0);
--header-gradient-end: rgba(28, 27, 27, 0.7);
--ui-border-muted: hsl(240 5% 20%);
--color-border: #383735;
--color-alpha: #ff8c2f;
--color-beta: #1c1b1b;
--color-gamma: #ffffff;
--color-gamma-opaque: rgba(255, 255, 255, 0.3);
}
/* Black Theme */
.theme-black,
.theme-black.dark {
--header-text-primary: #1c1b1b;
--header-text-secondary: #999999;
--header-background-color: #f2f2f2;
--header-gradient-start: rgba(242, 242, 242, 0);
--header-gradient-end: rgba(242, 242, 242, 0.7);
--ui-border-muted: hsl(240 5.9% 90%);
--color-border: #e0e0e0;
--color-alpha: #ff8c2f;
--color-beta: #f2f2f2;
--color-gamma: #1c1b1b;
--color-gamma-opaque: rgba(28, 27, 27, 0.3);
}
/* Gray Theme */
.theme-gray {
--header-text-primary: #ffffff;
--header-text-secondary: #999999;
--header-background-color: #1c1b1b;
--header-gradient-start: rgba(28, 27, 27, 0);
--header-gradient-end: rgba(28, 27, 27, 0.7);
--ui-border-muted: hsl(240 5% 25%);
--color-border: #383735;
--color-alpha: #ff8c2f;
--color-beta: #383735;
--color-gamma: #ffffff;
--color-gamma-opaque: rgba(255, 255, 255, 0.3);
}
/* Azure Theme */
.theme-azure {
--header-text-primary: #1c1b1b;
--header-text-secondary: #999999;
--header-background-color: #f2f2f2;
--header-gradient-start: rgba(242, 242, 242, 0);
--header-gradient-end: rgba(242, 242, 242, 0.7);
--ui-border-muted: hsl(210 40% 80%);
--color-border: #5a8bb8;
--color-alpha: #ff8c2f;
--color-beta: #e7f2f8;
--color-gamma: #336699;
--color-gamma-opaque: rgba(51, 102, 153, 0.3);
}
/* Dark Mode Overrides */
.dark {
--ui-border-muted: hsl(240 5% 20%);
--color-border: #383735;
}
/*
* Dynamic color variables for user overrides from GraphQL
* These are set via JavaScript and override the theme defaults
* Using :root with class for higher specificity to override theme classes
*/
:root.has-custom-header-text {
--header-text-primary: var(--custom-header-text-primary);
--color-header-text-primary: var(--custom-header-text-primary);
}
:root.has-custom-header-meta {
--header-text-secondary: var(--custom-header-text-secondary);
--color-header-text-secondary: var(--custom-header-text-secondary);
}
:root.has-custom-header-bg {
--header-background-color: var(--custom-header-background-color);
--color-header-background: var(--custom-header-background-color);
--header-gradient-start: var(--custom-header-gradient-start);
--header-gradient-end: var(--custom-header-gradient-end);
--color-header-gradient-start: var(--custom-header-gradient-start);
--color-header-gradient-end: var(--custom-header-gradient-end);
}
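
The comments above note that these custom header variables are set from JavaScript based on GraphQL theme data. As a rough sketch only (reusing the variable and class names from this stylesheet; the override object shape and function name are hypothetical, not from the repository), the runtime side could look like:

```typescript
// Sketch only: applies user header overrides by setting the CSS custom properties
// and toggling the :root classes that theme-variants.css keys off of.
interface HeaderOverrides {
    textPrimary?: string;      // maps to --custom-header-text-primary
    backgroundColor?: string;  // maps to --custom-header-background-color
    gradientStart?: string;    // maps to --custom-header-gradient-start
    gradientEnd?: string;      // maps to --custom-header-gradient-end
}

function applyHeaderOverrides(overrides: HeaderOverrides): void {
    const root = document.documentElement;
    if (overrides.textPrimary) {
        root.style.setProperty('--custom-header-text-primary', overrides.textPrimary);
        root.classList.add('has-custom-header-text');
    }
    if (overrides.backgroundColor) {
        root.style.setProperty('--custom-header-background-color', overrides.backgroundColor);
        root.style.setProperty('--custom-header-gradient-start', overrides.gradientStart ?? 'transparent');
        root.style.setProperty('--custom-header-gradient-end', overrides.gradientEnd ?? 'transparent');
        root.classList.add('has-custom-header-bg');
    }
}
```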

View File

@@ -84,23 +84,23 @@
--color-primary-900: #7c2d12;
--color-primary-950: #431407;
/* Header colors - defaults will be overridden by theme */
--color-header-text-primary: var(--header-text-primary, #1c1c1c);
--color-header-text-secondary: var(--header-text-secondary, #999999);
--color-header-background: var(--header-background-color, #f2f2f2);
/* Header colors */
--color-header-text-primary: var(--header-text-primary);
--color-header-text-secondary: var(--header-text-secondary);
--color-header-background-color: var(--header-background-color);
/* Legacy colors - defaults (overridden by theme-variants.css) */
--color-alpha: #ff8c2f;
--color-beta: #f2f2f2;
--color-gamma: #999999;
--color-gamma-opaque: rgba(153, 153, 153, 0.5);
--color-customgradient-start: rgba(242, 242, 242, 0);
--color-customgradient-end: rgba(242, 242, 242, 0.85);
/* Legacy colors */
--color-alpha: var(--color-alpha);
--color-beta: var(--color-beta);
--color-gamma: var(--color-gamma);
--color-gamma-opaque: var(--color-gamma-opaque);
--color-customgradient-start: var(--color-customgradient-start);
--color-customgradient-end: var(--color-customgradient-end);
/* Gradients - defaults (overridden by theme-variants.css) */
--color-header-gradient-start: rgba(242, 242, 242, 0);
--color-header-gradient-end: rgba(242, 242, 242, 0.85);
--color-banner-gradient: none;
/* Gradients */
--color-header-gradient-start: var(--header-gradient-start);
--color-header-gradient-end: var(--header-gradient-end);
--color-banner-gradient: var(--banner-gradient);
/* Font sizes */
--font-10px: 10px;
@@ -167,27 +167,6 @@
--max-width-800px: 800px;
--max-width-1024px: 1024px;
/* Container sizes adjusted for 10px base font size (1.6x scale) */
--container-xs: 32rem;
--container-sm: 38.4rem;
--container-md: 44.8rem;
--container-lg: 51.2rem;
--container-xl: 57.6rem;
--container-2xl: 67.2rem;
--container-3xl: 76.8rem;
--container-4xl: 89.6rem;
--container-5xl: 102.4rem;
--container-6xl: 115.2rem;
--container-7xl: 128rem;
/* Extended width scale for max-w-* utilities */
--width-5xl: 102.4rem;
--width-6xl: 115.2rem;
--width-7xl: 128rem;
--width-8xl: 140.8rem;
--width-9xl: 153.6rem;
--width-10xl: 166.4rem;
/* Animations */
--animate-mark-2: mark-2 1.5s ease infinite;
--animate-mark-3: mark-3 1.5s ease infinite;

View File

@@ -161,3 +161,4 @@ Enables GraphQL playground at `http://tower.local/graphql`
- Never use the `any` type. Always prefer proper typing
- Avoid using casting whenever possible, prefer proper typing from the start
- **IMPORTANT:** cache-manager v7 expects TTL values in **milliseconds**, not seconds. Always use milliseconds when setting cache TTL (e.g., 600000 for 10 minutes, not 600)
- Use the nuxt UI library in VUE MODE NOT IN NUXT MODE
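
To illustrate the TTL guideline above, here is a minimal sketch (not taken from the repository; the cache wiring, key, and value are illustrative) of passing a millisecond TTL to a cache-manager v7 style `set` call:

```typescript
// Sketch only: cache-manager v7 interprets the TTL argument as milliseconds.
// The cache interface and key names below are hypothetical placeholders.
interface SimpleCache {
    set(key: string, value: unknown, ttl?: number): Promise<unknown>;
}

async function cacheDockerUpdateStatus(cache: SimpleCache): Promise<void> {
    const TEN_MINUTES_MS = 10 * 60 * 1000; // 600000 ms, not 600
    await cache.set('docker:update-status', { updateAvailable: false }, TEN_MINUTES_MS);
}
```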

View File

@@ -31,4 +31,3 @@ BYPASS_CORS_CHECKS=true
CHOKIDAR_USEPOLLING=true
LOG_TRANSPORT=console
LOG_LEVEL=trace
ENABLE_NEXT_DOCKER_RELEASE=true

api/.gitignore (vendored), 3 lines changed
View File

@@ -93,6 +93,3 @@ dev/local-session
# local OIDC config for testing - contains secrets
dev/configs/oidc.local.json
# local api keys
dev/keys/*

View File

@@ -1,89 +1,5 @@
# Changelog
## [4.21.0](https://github.com/unraid/api/compare/v4.20.4...v4.21.0) (2025-09-10)
### Features
* add zsh shell detection to install script ([#1539](https://github.com/unraid/api/issues/1539)) ([50ea2a3](https://github.com/unraid/api/commit/50ea2a3ffb82b30152fb85e0fb9b0d178d596efe))
* **api:** determine if docker container has update ([#1582](https://github.com/unraid/api/issues/1582)) ([e57d81e](https://github.com/unraid/api/commit/e57d81e0735772758bb85e0b3c89dce15c56635e))
### Bug Fixes
* white on white login text ([ae4d3ec](https://github.com/unraid/api/commit/ae4d3ecbc417454ae3c6e02018f8e4c49bbfc902))
## [4.20.4](https://github.com/unraid/api/compare/v4.20.3...v4.20.4) (2025-09-09)
### Bug Fixes
* staging PR plugin fixes + UI issues on 7.2 beta ([b79b44e](https://github.com/unraid/api/commit/b79b44e95c65a124313814ab55b0d0a745a799c7))
## [4.20.3](https://github.com/unraid/api/compare/v4.20.2...v4.20.3) (2025-09-09)
### Bug Fixes
* header background color issues fixed on 7.2 - thanks Nick! ([73c1100](https://github.com/unraid/api/commit/73c1100d0ba396fe4342f8ce7561017ab821e68b))
## [4.20.2](https://github.com/unraid/api/compare/v4.20.1...v4.20.2) (2025-09-09)
### Bug Fixes
* trigger deployment ([a27453f](https://github.com/unraid/api/commit/a27453fda81e4eeb07f257e60516bebbbc27cf7a))
## [4.20.1](https://github.com/unraid/api/compare/v4.20.0...v4.20.1) (2025-09-09)
### Bug Fixes
* adjust header styles to fix flashing and width issues - thanks ZarZ ([4759b3d](https://github.com/unraid/api/commit/4759b3d0b3fb6bc71636f75f807cd6f4f62305d1))
## [4.20.0](https://github.com/unraid/api/compare/v4.19.1...v4.20.0) (2025-09-08)
### Features
* **disks:** add isSpinning field to Disk type ([#1527](https://github.com/unraid/api/issues/1527)) ([193be3d](https://github.com/unraid/api/commit/193be3df3672514be9904e3d4fbdff776470afc0))
### Bug Fixes
* better component loading to prevent per-page strange behavior ([095c222](https://github.com/unraid/api/commit/095c2221c94f144f8ad410a69362b15803765531))
* **deps:** pin dependencies ([#1669](https://github.com/unraid/api/issues/1669)) ([413db4b](https://github.com/unraid/api/commit/413db4bd30a06aa69d3ca86e793782854f822589))
* **plugin:** add fallback for unraid-api stop in deprecation cleanup ([#1668](https://github.com/unraid/api/issues/1668)) ([797bf50](https://github.com/unraid/api/commit/797bf50ec702ebc8244ff71a8ef1a80ea5cd2169))
* prepend 'v' to API version in workflow dispatch inputs ([f0cffbd](https://github.com/unraid/api/commit/f0cffbdc7ac36e7037ab60fe9dddbb2cab4a5e10))
* progress frame background color fix ([#1672](https://github.com/unraid/api/issues/1672)) ([785f1f5](https://github.com/unraid/api/commit/785f1f5eb1a1cc8b41f6eb502e4092d149cfbd80))
* properly override header values ([#1673](https://github.com/unraid/api/issues/1673)) ([aecf70f](https://github.com/unraid/api/commit/aecf70ffad60c83074347d3d6ec23f73acbd1aee))
## [4.19.1](https://github.com/unraid/api/compare/v4.19.0...v4.19.1) (2025-09-05)
### Bug Fixes
* custom path detection to fix setup issues ([#1664](https://github.com/unraid/api/issues/1664)) ([2ecdb99](https://github.com/unraid/api/commit/2ecdb99052f39d89af21bbe7ad3f80b83bb1eaa9))
## [4.19.0](https://github.com/unraid/api/compare/v4.18.2...v4.19.0) (2025-09-04)
### Features
* mount vue apps, not web components ([#1639](https://github.com/unraid/api/issues/1639)) ([88087d5](https://github.com/unraid/api/commit/88087d5201992298cdafa791d5d1b5bb23dcd72b))
### Bug Fixes
* api version json response ([#1653](https://github.com/unraid/api/issues/1653)) ([292bc0f](https://github.com/unraid/api/commit/292bc0fc810a0d0f0cce6813b0631ff25099cc05))
* enhance DOM validation and cleanup in vue-mount-app ([6cf7c88](https://github.com/unraid/api/commit/6cf7c88242f2f4fe9f83871560039767b5b90273))
* enhance getKeyFile function to handle missing key file gracefully ([#1659](https://github.com/unraid/api/issues/1659)) ([728b38a](https://github.com/unraid/api/commit/728b38ac11faeacd39ce9d0157024ad140e29b36))
* info alert docker icon ([#1661](https://github.com/unraid/api/issues/1661)) ([239cdd6](https://github.com/unraid/api/commit/239cdd6133690699348e61f68e485d2b54fdcbdb))
* oidc cache busting issues fixed ([#1656](https://github.com/unraid/api/issues/1656)) ([e204eb8](https://github.com/unraid/api/commit/e204eb80a00ab9242e3dca4ccfc3e1b55a7694b7))
* **plugin:** restore cleanup behavior for unsupported unraid versions ([#1658](https://github.com/unraid/api/issues/1658)) ([534a077](https://github.com/unraid/api/commit/534a07788b76de49e9ba14059a9aed0bf16e02ca))
* UnraidToaster component and update dialog close button ([#1657](https://github.com/unraid/api/issues/1657)) ([44774d0](https://github.com/unraid/api/commit/44774d0acdd25aa33cb60a5d0b4f80777f4068e5))
* vue mounting logic with tests ([#1651](https://github.com/unraid/api/issues/1651)) ([33774aa](https://github.com/unraid/api/commit/33774aa596124a031a7452b62ca4c43743a09951))
## [4.18.2](https://github.com/unraid/api/compare/v4.18.1...v4.18.2) (2025-09-03)

View File

@@ -1,5 +1,5 @@
{
"version": "4.19.1",
"version": "4.18.1",
"extraOrigins": [],
"sandbox": true,
"ssoSubIds": [],

View File

@@ -1,247 +0,0 @@
# Feature Flags
Feature flags allow you to conditionally enable or disable functionality in the Unraid API. This is useful for gradually rolling out new features, A/B testing, or keeping experimental code behind flags during development.
## Setting Up Feature Flags
### 1. Define the Feature Flag
Feature flags are defined as environment variables and collected in `src/consts.ts`:
```typescript
// src/environment.ts
export const ENABLE_MY_NEW_FEATURE = process.env.ENABLE_MY_NEW_FEATURE === 'true';
// src/consts.ts
export const FeatureFlags = Object.freeze({
    ENABLE_NEXT_DOCKER_RELEASE,
    ENABLE_MY_NEW_FEATURE, // Add your new flag here
});
```
### 2. Set the Environment Variable
Set the environment variable when running the API:
```bash
ENABLE_MY_NEW_FEATURE=true unraid-api start
```
Or add it to your `.env` file:
```env
ENABLE_MY_NEW_FEATURE=true
```
## Using Feature Flags in GraphQL
### Method 1: @UseFeatureFlag Decorator (Schema-Level)
The `@UseFeatureFlag` decorator conditionally includes or excludes GraphQL fields, queries, and mutations from the schema based on feature flags. When a feature flag is disabled, the field won't appear in the GraphQL schema at all.
```typescript
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
import { Query, Mutation, ResolveField } from '@nestjs/graphql';
@Resolver()
export class MyResolver {
    // Conditionally include a query
    @UseFeatureFlag('ENABLE_MY_NEW_FEATURE')
    @Query(() => String)
    async experimentalQuery() {
        return 'This query only exists when ENABLE_MY_NEW_FEATURE is true';
    }
    // Conditionally include a mutation
    @UseFeatureFlag('ENABLE_MY_NEW_FEATURE')
    @Mutation(() => Boolean)
    async experimentalMutation() {
        return true;
    }
    // Conditionally include a field resolver
    @UseFeatureFlag('ENABLE_MY_NEW_FEATURE')
    @ResolveField(() => String)
    async experimentalField() {
        return 'This field only exists when the flag is enabled';
    }
}
```
**Benefits:**
- Clean schema - disabled features don't appear in GraphQL introspection
- No runtime overhead for disabled features
- Clear feature boundaries
**Use when:**
- You want to completely hide features from the GraphQL schema
- The feature is experimental or in beta
- You're doing a gradual rollout
### Method 2: checkFeatureFlag Function (Runtime)
The `checkFeatureFlag` function provides runtime feature flag checking within resolver methods. It throws a `ForbiddenException` if the feature is disabled.
```typescript
import { checkFeatureFlag } from '@app/unraid-api/utils/feature-flag.helper.js';
import { FeatureFlags } from '@app/consts.js';
import { Query, ResolveField } from '@nestjs/graphql';
@Resolver()
export class MyResolver {
    @Query(() => String)
    async myQuery(
        @Args('useNewAlgorithm', { nullable: true }) useNewAlgorithm?: boolean
    ) {
        // Conditionally use new logic based on feature flag
        if (useNewAlgorithm) {
            checkFeatureFlag(FeatureFlags, 'ENABLE_MY_NEW_FEATURE');
            return this.newAlgorithm();
        }
        return this.oldAlgorithm();
    }
    @ResolveField(() => String)
    async dataField() {
        // Check flag at the start of the method
        checkFeatureFlag(FeatureFlags, 'ENABLE_MY_NEW_FEATURE');
        // Feature-specific logic here
        return this.computeExperimentalData();
    }
}
```
**Benefits:**
- More granular control within methods
- Can conditionally execute parts of a method
- Useful for A/B testing scenarios
- Good for gradual migration strategies
**Use when:**
- You need conditional logic within a method
- The field should exist but behavior changes based on the flag
- You're migrating from old to new implementation gradually
## Feature Flag Patterns
### Pattern 1: Complete Feature Toggle
Hide an entire feature behind a flag:
```typescript
@UseFeatureFlag('ENABLE_DOCKER_TEMPLATES')
@Resolver(() => DockerTemplate)
export class DockerTemplateResolver {
    // All resolvers in this class are toggled by the flag
}
```
### Pattern 2: Gradual Migration
Migrate from old to new implementation:
```typescript
@Query(() => [Container])
async getContainers(@Args('version') version?: string) {
    if (version === 'v2') {
        checkFeatureFlag(FeatureFlags, 'ENABLE_CONTAINERS_V2');
        return this.getContainersV2();
    }
    return this.getContainersV1();
}
```
### Pattern 3: Beta Features
Mark features as beta:
```typescript
@UseFeatureFlag('ENABLE_BETA_FEATURES')
@ResolveField(() => BetaMetrics, {
    description: 'BETA: Advanced metrics (requires ENABLE_BETA_FEATURES flag)'
})
async betaMetrics() {
    return this.computeBetaMetrics();
}
```
### Pattern 4: Performance Optimizations
Toggle expensive operations:
```typescript
@ResolveField(() => Statistics)
async statistics() {
    const basicStats = await this.getBasicStats();
    try {
        checkFeatureFlag(FeatureFlags, 'ENABLE_ADVANCED_ANALYTICS');
        const advancedStats = await this.getAdvancedStats();
        return { ...basicStats, ...advancedStats };
    } catch {
        // Feature disabled, return only basic stats
        return basicStats;
    }
}
```
## Testing with Feature Flags
When writing tests for feature-flagged code, create a mock to control feature flag values:
```typescript
import { vi } from 'vitest';
// Mock the entire consts module
vi.mock('@app/consts.js', async () => {
    const actual = await vi.importActual('@app/consts.js');
    return {
        ...actual,
        FeatureFlags: {
            ENABLE_MY_NEW_FEATURE: true, // Set your test value
            ENABLE_NEXT_DOCKER_RELEASE: false,
        }
    };
});
describe('MyResolver', () => {
    it('should execute new logic when feature is enabled', async () => {
        // Test new behavior with mocked flag
    });
});
```
## Best Practices
1. **Naming Convention**: Use `ENABLE_` prefix for boolean feature flags
2. **Environment Variables**: Always use uppercase with underscores
3. **Documentation**: Document what each feature flag controls
4. **Cleanup**: Remove feature flags once features are stable and fully rolled out
5. **Default State**: New features should default to `false` (disabled)
6. **Granularity**: Keep feature flags focused on a single feature or capability
7. **Testing**: Always test both enabled and disabled states
## Common Use Cases
- **Experimental Features**: Hide unstable features in production
- **Gradual Rollouts**: Enable features for specific environments first
- **A/B Testing**: Toggle between different implementations
- **Performance**: Disable expensive operations when not needed
- **Breaking Changes**: Provide migration path with both old and new behavior
- **Debug Features**: Enable additional logging or debugging tools
## Checking Active Feature Flags
To see which feature flags are currently active:
```typescript
// Log all feature flags on startup
console.log('Active Feature Flags:', FeatureFlags);
```
Or check via GraphQL introspection to see which fields are available based on current flags.

View File

@@ -139,9 +139,6 @@ type ArrayDisk implements Node {
"""ata | nvme | usb | (others)"""
transport: String
color: ArrayDiskFsColor
"""Whether the disk is currently spinning"""
isSpinning: Boolean
}
interface Node {
@@ -349,9 +346,6 @@ type Disk implements Node {
"""The partitions on the disk"""
partitions: [DiskPartition!]!
"""Whether the disk is spinning or not"""
isSpinning: Boolean!
}
"""The type of interface the disk uses to connect to the system"""
@@ -1050,19 +1044,6 @@ enum ThemeName {
white
}
type ExplicitStatusItem {
name: String!
updateStatus: UpdateStatus!
}
"""Update status of a container."""
enum UpdateStatus {
UP_TO_DATE
UPDATE_AVAILABLE
REBUILD_READY
UNKNOWN
}
type ContainerPort {
ip: String
privatePort: Port
@@ -1102,8 +1083,6 @@ type DockerContainer implements Node {
networkSettings: JSON
mounts: [JSON!]
autoStart: Boolean!
isUpdateAvailable: Boolean
isRebuildReady: Boolean
}
enum ContainerState {
@@ -1134,7 +1113,6 @@ type Docker implements Node {
containers(skipCache: Boolean! = false): [DockerContainer!]!
networks(skipCache: Boolean! = false): [DockerNetwork!]!
organizer: ResolvedOrganizerV1!
containerUpdateStatuses: [ExplicitStatusItem!]!
}
type ResolvedOrganizerView {
@@ -1383,12 +1361,6 @@ type CpuLoad {
"""The percentage of time the CPU spent servicing hardware interrupts."""
percentIrq: Float!
"""The percentage of time the CPU spent running virtual machines (guest)."""
percentGuest: Float!
"""The percentage of CPU time stolen by the hypervisor."""
percentSteal: Float!
}
type CpuUtilization implements Node {
@@ -2435,7 +2407,6 @@ type Mutation {
setDockerFolderChildren(folderId: String, childrenIds: [String!]!): ResolvedOrganizerV1!
deleteDockerEntries(entryIds: [String!]!): ResolvedOrganizerV1!
moveDockerEntriesToFolder(sourceEntryIds: [String!]!, destinationFolderId: String!): ResolvedOrganizerV1!
refreshDockerDigests: Boolean!
"""Initiates a flash drive backup using a configured remote."""
initiateFlashBackup(input: InitiateFlashBackupInput!): FlashBackupStatus!

View File

@@ -1,6 +1,6 @@
{
"name": "@unraid/api",
"version": "4.21.0",
"version": "4.18.2",
"main": "src/cli/index.ts",
"type": "module",
"corepack": {
@@ -94,7 +94,7 @@
"command-exists": "1.2.9",
"convert": "5.12.0",
"cookie": "1.0.2",
"cron": "4.3.0",
"cron": "4.3.3",
"cross-fetch": "4.1.0",
"diff": "8.0.2",
"dockerode": "4.0.7",

View File

@@ -1,12 +1,11 @@
import { expect, test, vi } from 'vitest';
import { expect, test } from 'vitest';
import { store } from '@app/store/index.js';
import { FileLoadStatus, StateFileKey } from '@app/store/types.js';
import '@app/core/utils/misc/get-key-file.js';
import '@app/store/modules/emhttp.js';
vi.mock('fs/promises');
test('Before loading key returns null', async () => {
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
const { status } = store.getState().registration;
@@ -49,70 +48,21 @@ test('Returns empty key if key location is empty', async () => {
await expect(getKeyFile()).resolves.toBe('');
});
test('Returns empty string when key file does not exist (ENOENT)', async () => {
const { readFile } = await import('fs/promises');
// Mock readFile to throw ENOENT error
const readFileMock = vi.mocked(readFile);
readFileMock.mockRejectedValueOnce(
Object.assign(new Error('ENOENT: no such file or directory'), { code: 'ENOENT' })
);
// Clear the module cache and re-import to get fresh module with mock
vi.resetModules();
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
const { updateEmhttpState } = await import('@app/store/modules/emhttp.js');
const { store: freshStore } = await import('@app/store/index.js');
// Set key file location to a non-existent file
freshStore.dispatch(
updateEmhttpState({
field: StateFileKey.var,
state: {
regFile: '/boot/config/Pro.key',
},
})
);
// Should return empty string when file doesn't exist
await expect(getKeyFile()).resolves.toBe('');
// Clear mock
readFileMock.mockReset();
vi.resetModules();
});
test('Returns decoded key file if key location exists', async () => {
const { readFile } = await import('fs/promises');
// Mock a valid key file content
const mockKeyContent =
'hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w==';
const binaryContent = Buffer.from(mockKeyContent, 'base64').toString('binary');
const readFileMock = vi.mocked(readFile);
readFileMock.mockResolvedValue(binaryContent);
// Clear the module cache and re-import to get fresh module with mock
vi.resetModules();
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
const { loadStateFiles } = await import('@app/store/modules/emhttp.js');
const { loadRegistrationKey } = await import('@app/store/modules/registration.js');
const { store: freshStore } = await import('@app/store/index.js');
// Load state files into store
await freshStore.dispatch(loadStateFiles());
await freshStore.dispatch(loadRegistrationKey());
// Check if store has state files loaded
const { status } = freshStore.getState().registration;
expect(status).toBe(FileLoadStatus.LOADED);
const result = await getKeyFile();
expect(result).toBe(
'hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w'
);
// Clear mock
readFileMock.mockReset();
vi.resetModules();
}, 10000);
test(
'Returns decoded key file if key location exists',
async () => {
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
const { loadStateFiles } = await import('@app/store/modules/emhttp.js');
const { loadRegistrationKey } = await import('@app/store/modules/registration.js');
// Load state files into store
await store.dispatch(loadStateFiles());
await store.dispatch(loadRegistrationKey());
// Check if store has state files loaded
const { status } = store.getState().registration;
expect(status).toBe(FileLoadStatus.LOADED);
await expect(getKeyFile()).resolves.toMatchInlineSnapshot(
'"hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w"'
);
},
{ timeout: 10000 }
);

View File

@@ -1,11 +1,10 @@
import { existsSync } from 'node:fs';
import { homedir } from 'node:os';
import { join } from 'node:path';
import { fileURLToPath } from 'node:url';
import { execa } from 'execa';
import pm2 from 'pm2';
import { afterAll, afterEach, beforeAll, describe, expect, it } from 'vitest';
import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from 'vitest';
import { isUnraidApiRunning } from '@app/core/utils/pm2/unraid-api-running.js';
@@ -18,6 +17,11 @@ const TEST_PROCESS_NAME = 'test-unraid-api';
// Shared PM2 connection state
let pm2Connected = false;
// Helper function to run CLI command (assumes CLI is built)
async function runCliCommand(command: string, options: any = {}) {
return await execa('node', [CLI_PATH, command], options);
}
// Helper to ensure PM2 connection is established
async function ensurePM2Connection() {
if (pm2Connected) return;
@@ -53,7 +57,7 @@ async function deleteTestProcesses() {
}
const processName = processNames[deletedCount];
pm2.delete(processName, () => {
pm2.delete(processName, (deleteErr) => {
// Ignore errors, process might not exist
deletedCount++;
deleteNext();
@@ -88,7 +92,7 @@ async function cleanupAllPM2Processes() {
}
// Kill the daemon to ensure fresh state
pm2.killDaemon(() => {
pm2.killDaemon((killErr) => {
pm2.disconnect();
pm2Connected = false;
// Small delay to let PM2 fully shutdown
@@ -100,9 +104,6 @@ async function cleanupAllPM2Processes() {
describe.skipIf(!!process.env.CI)('PM2 integration tests', () => {
beforeAll(async () => {
// Set PM2_HOME to use home directory for testing (not /var/log)
process.env.PM2_HOME = join(homedir(), '.pm2');
// Build the CLI if it doesn't exist (only for CLI tests)
if (!existsSync(CLI_PATH)) {
console.log('Building CLI for integration tests...');
@@ -197,13 +198,6 @@ describe.skipIf(!!process.env.CI)('PM2 integration tests', () => {
}, 30000);
it('should handle PM2 connection errors gracefully', async () => {
// Disconnect PM2 first to ensure we're testing fresh connection
await new Promise<void>((resolve) => {
pm2.disconnect();
pm2Connected = false;
setTimeout(resolve, 100);
});
// Set an invalid PM2_HOME to force connection failure
const originalPM2Home = process.env.PM2_HOME;
process.env.PM2_HOME = '/invalid/path/that/does/not/exist';

View File

@@ -12,22 +12,7 @@ import {
UpdateRCloneRemoteDto,
} from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
vi.mock('got', () => {
const mockPost = vi.fn();
const gotMock = {
post: mockPost,
};
return {
default: gotMock,
HTTPError: class HTTPError extends Error {
response?: any;
constructor(response?: any) {
super('HTTP Error');
this.response = response;
}
},
};
});
vi.mock('got');
vi.mock('execa');
vi.mock('p-retry');
vi.mock('node:fs', () => ({
@@ -75,7 +60,7 @@ vi.mock('@nestjs/common', async (importOriginal) => {
describe('RCloneApiService', () => {
let service: RCloneApiService;
let mockGotPost: any;
let mockGot: any;
let mockExeca: any;
let mockPRetry: any;
let mockExistsSync: any;
@@ -83,19 +68,19 @@ describe('RCloneApiService', () => {
beforeEach(async () => {
vi.clearAllMocks();
const got = await import('got');
const { default: got } = await import('got');
const { execa } = await import('execa');
const pRetry = await import('p-retry');
const { existsSync } = await import('node:fs');
const { fileExists } = await import('@app/core/utils/files/file-exists.js');
mockGotPost = vi.mocked(got.default.post);
mockGot = vi.mocked(got);
mockExeca = vi.mocked(execa);
mockPRetry = vi.mocked(pRetry.default);
mockExistsSync = vi.mocked(existsSync);
// Mock successful RClone API response for socket check
mockGotPost.mockResolvedValue({ body: { pid: 12345 } });
mockGot.post = vi.fn().mockResolvedValue({ body: { pid: 12345 } });
// Mock RClone binary exists check
vi.mocked(fileExists).mockResolvedValue(true);
@@ -112,10 +97,10 @@ describe('RCloneApiService', () => {
mockPRetry.mockResolvedValue(undefined);
service = new RCloneApiService();
await service.onApplicationBootstrap();
await service.onModuleInit();
// Reset the mock after initialization to prepare for test-specific responses
mockGotPost.mockClear();
mockGot.post.mockClear();
});
describe('getProviders', () => {
@@ -124,15 +109,15 @@ describe('RCloneApiService', () => {
{ name: 'aws', prefix: 's3', description: 'Amazon S3' },
{ name: 'google', prefix: 'drive', description: 'Google Drive' },
];
mockGotPost.mockResolvedValue({
mockGot.post.mockResolvedValue({
body: { providers: mockProviders },
});
const result = await service.getProviders();
expect(result).toEqual(mockProviders);
expect(mockGotPost).toHaveBeenCalledWith(
expect.stringMatching(/\/config\/providers$/),
expect(mockGot.post).toHaveBeenCalledWith(
'http://unix:/tmp/rclone.sock:/config/providers',
expect.objectContaining({
json: {},
responseType: 'json',
@@ -145,7 +130,7 @@ describe('RCloneApiService', () => {
});
it('should return empty array when no providers', async () => {
mockGotPost.mockResolvedValue({ body: {} });
mockGot.post.mockResolvedValue({ body: {} });
const result = await service.getProviders();
@@ -156,15 +141,15 @@ describe('RCloneApiService', () => {
describe('listRemotes', () => {
it('should return list of remotes', async () => {
const mockRemotes = ['backup-s3', 'drive-storage'];
mockGotPost.mockResolvedValue({
mockGot.post.mockResolvedValue({
body: { remotes: mockRemotes },
});
const result = await service.listRemotes();
expect(result).toEqual(mockRemotes);
expect(mockGotPost).toHaveBeenCalledWith(
expect.stringMatching(/\/config\/listremotes$/),
expect(mockGot.post).toHaveBeenCalledWith(
'http://unix:/tmp/rclone.sock:/config/listremotes',
expect.objectContaining({
json: {},
responseType: 'json',
@@ -177,7 +162,7 @@ describe('RCloneApiService', () => {
});
it('should return empty array when no remotes', async () => {
mockGotPost.mockResolvedValue({ body: {} });
mockGot.post.mockResolvedValue({ body: {} });
const result = await service.listRemotes();
@@ -189,13 +174,13 @@ describe('RCloneApiService', () => {
it('should return remote details', async () => {
const input: GetRCloneRemoteDetailsDto = { name: 'test-remote' };
const mockConfig = { type: 's3', provider: 'AWS' };
mockGotPost.mockResolvedValue({ body: mockConfig });
mockGot.post.mockResolvedValue({ body: mockConfig });
const result = await service.getRemoteDetails(input);
expect(result).toEqual(mockConfig);
expect(mockGotPost).toHaveBeenCalledWith(
expect.stringMatching(/\/config\/get$/),
expect(mockGot.post).toHaveBeenCalledWith(
'http://unix:/tmp/rclone.sock:/config/get',
expect.objectContaining({
json: { name: 'test-remote' },
responseType: 'json',
@@ -212,7 +197,7 @@ describe('RCloneApiService', () => {
it('should return remote configuration', async () => {
const input: GetRCloneRemoteConfigDto = { name: 'test-remote' };
const mockConfig = { type: 's3', access_key_id: 'AKIA...' };
mockGotPost.mockResolvedValue({ body: mockConfig });
mockGot.post.mockResolvedValue({ body: mockConfig });
const result = await service.getRemoteConfig(input);
@@ -228,13 +213,13 @@ describe('RCloneApiService', () => {
parameters: { access_key_id: 'AKIA...', secret_access_key: 'secret' },
};
const mockResponse = { success: true };
mockGotPost.mockResolvedValue({ body: mockResponse });
mockGot.post.mockResolvedValue({ body: mockResponse });
const result = await service.createRemote(input);
expect(result).toEqual(mockResponse);
expect(mockGotPost).toHaveBeenCalledWith(
expect.stringMatching(/\/config\/create$/),
expect(mockGot.post).toHaveBeenCalledWith(
'http://unix:/tmp/rclone.sock:/config/create',
expect.objectContaining({
json: {
name: 'new-remote',
@@ -258,13 +243,13 @@ describe('RCloneApiService', () => {
parameters: { access_key_id: 'NEW_AKIA...' },
};
const mockResponse = { success: true };
mockGotPost.mockResolvedValue({ body: mockResponse });
mockGot.post.mockResolvedValue({ body: mockResponse });
const result = await service.updateRemote(input);
expect(result).toEqual(mockResponse);
expect(mockGotPost).toHaveBeenCalledWith(
expect.stringMatching(/\/config\/update$/),
expect(mockGot.post).toHaveBeenCalledWith(
'http://unix:/tmp/rclone.sock:/config/update',
expect.objectContaining({
json: {
name: 'existing-remote',
@@ -284,13 +269,13 @@ describe('RCloneApiService', () => {
it('should delete a remote', async () => {
const input: DeleteRCloneRemoteDto = { name: 'remote-to-delete' };
const mockResponse = { success: true };
mockGotPost.mockResolvedValue({ body: mockResponse });
mockGot.post.mockResolvedValue({ body: mockResponse });
const result = await service.deleteRemote(input);
expect(result).toEqual(mockResponse);
expect(mockGotPost).toHaveBeenCalledWith(
expect.stringMatching(/\/config\/delete$/),
expect(mockGot.post).toHaveBeenCalledWith(
'http://unix:/tmp/rclone.sock:/config/delete',
expect.objectContaining({
json: { name: 'remote-to-delete' },
responseType: 'json',
@@ -311,13 +296,13 @@ describe('RCloneApiService', () => {
options: { delete_on: 'dst' },
};
const mockResponse = { jobid: 'job-123' };
mockGotPost.mockResolvedValue({ body: mockResponse });
mockGot.post.mockResolvedValue({ body: mockResponse });
const result = await service.startBackup(input);
expect(result).toEqual(mockResponse);
expect(mockGotPost).toHaveBeenCalledWith(
expect.stringMatching(/\/sync\/copy$/),
expect(mockGot.post).toHaveBeenCalledWith(
'http://unix:/tmp/rclone.sock:/sync/copy',
expect.objectContaining({
json: {
srcFs: '/source/path',
@@ -338,13 +323,13 @@ describe('RCloneApiService', () => {
it('should return job status', async () => {
const input: GetRCloneJobStatusDto = { jobId: 'job-123' };
const mockStatus = { status: 'running', progress: 0.5 };
mockGotPost.mockResolvedValue({ body: mockStatus });
mockGot.post.mockResolvedValue({ body: mockStatus });
const result = await service.getJobStatus(input);
expect(result).toEqual(mockStatus);
expect(mockGotPost).toHaveBeenCalledWith(
expect.stringMatching(/\/job\/status$/),
expect(mockGot.post).toHaveBeenCalledWith(
'http://unix:/tmp/rclone.sock:/job/status',
expect.objectContaining({
json: { jobid: 'job-123' },
responseType: 'json',
@@ -363,13 +348,13 @@ describe('RCloneApiService', () => {
{ id: 'job-1', status: 'running' },
{ id: 'job-2', status: 'finished' },
];
mockGotPost.mockResolvedValue({ body: mockJobs });
mockGot.post.mockResolvedValue({ body: mockJobs });
const result = await service.listRunningJobs();
expect(result).toEqual(mockJobs);
expect(mockGotPost).toHaveBeenCalledWith(
expect.stringMatching(/\/job\/list$/),
expect(mockGot.post).toHaveBeenCalledWith(
'http://unix:/tmp/rclone.sock:/job/list',
expect.objectContaining({
json: {},
responseType: 'json',
@@ -393,7 +378,7 @@ describe('RCloneApiService', () => {
},
};
Object.setPrototypeOf(httpError, HTTPError.prototype);
mockGotPost.mockRejectedValue(httpError);
mockGot.post.mockRejectedValue(httpError);
await expect(service.getProviders()).rejects.toThrow(
'Rclone API Error (config/providers, HTTP 500): Rclone Error: Internal server error'
@@ -410,7 +395,7 @@ describe('RCloneApiService', () => {
},
};
Object.setPrototypeOf(httpError, HTTPError.prototype);
mockGotPost.mockRejectedValue(httpError);
mockGot.post.mockRejectedValue(httpError);
await expect(service.getProviders()).rejects.toThrow(
'Rclone API Error (config/providers, HTTP 404): Failed to process error response body. Raw body:'
@@ -427,7 +412,7 @@ describe('RCloneApiService', () => {
},
};
Object.setPrototypeOf(httpError, HTTPError.prototype);
mockGotPost.mockRejectedValue(httpError);
mockGot.post.mockRejectedValue(httpError);
await expect(service.getProviders()).rejects.toThrow(
'Rclone API Error (config/providers, HTTP 400): Failed to process error response body. Raw body: invalid json'
@@ -436,108 +421,17 @@ describe('RCloneApiService', () => {
it('should handle non-HTTP errors', async () => {
const networkError = new Error('Network connection failed');
mockGotPost.mockRejectedValue(networkError);
mockGot.post.mockRejectedValue(networkError);
await expect(service.getProviders()).rejects.toThrow('Network connection failed');
});
it('should handle unknown errors', async () => {
mockGotPost.mockRejectedValue('unknown error');
mockGot.post.mockRejectedValue('unknown error');
await expect(service.getProviders()).rejects.toThrow(
'Unknown error calling RClone API (config/providers) with params {}: unknown error'
);
});
});
describe('checkRcloneBinaryExists', () => {
beforeEach(() => {
// Create a new service instance without initializing for these tests
service = new RCloneApiService();
});
it('should return true when rclone version is 1.70.0', async () => {
mockExeca.mockResolvedValueOnce({
stdout: 'rclone v1.70.0\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
stderr: '',
} as any);
const result = await (service as any).checkRcloneBinaryExists();
expect(result).toBe(true);
});
it('should return true when rclone version is newer than 1.70.0', async () => {
mockExeca.mockResolvedValueOnce({
stdout: 'rclone v1.75.2\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
stderr: '',
} as any);
const result = await (service as any).checkRcloneBinaryExists();
expect(result).toBe(true);
});
it('should return false when rclone version is older than 1.70.0', async () => {
mockExeca.mockResolvedValueOnce({
stdout: 'rclone v1.69.0\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
stderr: '',
} as any);
const result = await (service as any).checkRcloneBinaryExists();
expect(result).toBe(false);
});
it('should return false when rclone version is much older', async () => {
mockExeca.mockResolvedValueOnce({
stdout: 'rclone v1.50.0\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
stderr: '',
} as any);
const result = await (service as any).checkRcloneBinaryExists();
expect(result).toBe(false);
});
it('should return false when version cannot be parsed', async () => {
mockExeca.mockResolvedValueOnce({
stdout: 'rclone unknown version format',
stderr: '',
} as any);
const result = await (service as any).checkRcloneBinaryExists();
expect(result).toBe(false);
});
it('should return false when rclone binary is not found', async () => {
const error = new Error('Command not found') as any;
error.code = 'ENOENT';
mockExeca.mockRejectedValueOnce(error);
const result = await (service as any).checkRcloneBinaryExists();
expect(result).toBe(false);
});
it('should return false and log error for other exceptions', async () => {
mockExeca.mockRejectedValueOnce(new Error('Some other error'));
const result = await (service as any).checkRcloneBinaryExists();
expect(result).toBe(false);
});
it('should handle beta/rc versions correctly', async () => {
mockExeca.mockResolvedValueOnce({
stdout: 'rclone v1.70.0-beta.1\n- os/version: darwin 14.0 (64 bit)\n- os/kernel: 23.0.0 (arm64)',
stderr: '',
} as any);
const result = await (service as any).checkRcloneBinaryExists();
expect(result).toBe(true);
});
});
});
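The assertions in this spec all expect got.post to be called against rclone's remote-control endpoints through a unix socket. A minimal sketch of the call shape being exercised, using the socket path and remote name from the test fixtures rather than production values:

import got from 'got';

// Sketch only: got addresses a unix socket with the
// 'http://unix:<socket path>:<endpoint>' URL form, which is what the tests assert.
async function getRemoteConfig(name: string): Promise<unknown> {
    const { body } = await got.post('http://unix:/tmp/rclone.sock:/config/get', {
        json: { name },
        responseType: 'json',
    });
    return body;
}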

View File

@@ -211,7 +211,6 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
"fsUsed": null,
"id": "ST18000NM000J-2TV103_ZR585CPY",
"idx": 0,
"isSpinning": true,
"name": "parity",
"numErrors": 0,
"numReads": 0,
@@ -236,7 +235,6 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
"fsUsed": 4116003021,
"id": "ST18000NM000J-2TV103_ZR5B1W9X",
"idx": 1,
"isSpinning": true,
"name": "disk1",
"numErrors": 0,
"numReads": 0,
@@ -261,7 +259,6 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
"fsUsed": 11904860828,
"id": "WDC_WD120EDAZ-11F3RA0_5PJRD45C",
"idx": 2,
"isSpinning": true,
"name": "disk2",
"numErrors": 0,
"numReads": 0,
@@ -286,7 +283,6 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
"fsUsed": 6478056481,
"id": "WDC_WD120EMAZ-11BLFA0_5PH8BTYD",
"idx": 3,
"isSpinning": true,
"name": "disk3",
"numErrors": 0,
"numReads": 0,
@@ -311,7 +307,6 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
"fsUsed": 137273827,
"id": "Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z",
"idx": 30,
"isSpinning": true,
"name": "cache",
"numErrors": 0,
"numReads": 0,
@@ -336,7 +331,6 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
"fsUsed": null,
"id": "KINGSTON_SA2000M8250G_50026B7282669D9E",
"idx": 31,
"isSpinning": true,
"name": "cache2",
"numErrors": 0,
"numReads": 0,
@@ -361,7 +355,6 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
"fsUsed": 851325,
"id": "Cruzer",
"idx": 32,
"isSpinning": true,
"name": "flash",
"numErrors": 0,
"numReads": 0,

View File

@@ -28,7 +28,6 @@ test('Returns parsed state file', async () => {
"fsUsed": null,
"id": "ST18000NM000J-2TV103_ZR585CPY",
"idx": 0,
"isSpinning": true,
"name": "parity",
"numErrors": 0,
"numReads": 0,
@@ -53,7 +52,6 @@ test('Returns parsed state file', async () => {
"fsUsed": 4116003021,
"id": "ST18000NM000J-2TV103_ZR5B1W9X",
"idx": 1,
"isSpinning": true,
"name": "disk1",
"numErrors": 0,
"numReads": 0,
@@ -78,7 +76,6 @@ test('Returns parsed state file', async () => {
"fsUsed": 11904860828,
"id": "WDC_WD120EDAZ-11F3RA0_5PJRD45C",
"idx": 2,
"isSpinning": true,
"name": "disk2",
"numErrors": 0,
"numReads": 0,
@@ -103,7 +100,6 @@ test('Returns parsed state file', async () => {
"fsUsed": 6478056481,
"id": "WDC_WD120EMAZ-11BLFA0_5PH8BTYD",
"idx": 3,
"isSpinning": true,
"name": "disk3",
"numErrors": 0,
"numReads": 0,
@@ -128,7 +124,6 @@ test('Returns parsed state file', async () => {
"fsUsed": 137273827,
"id": "Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z",
"idx": 30,
"isSpinning": true,
"name": "cache",
"numErrors": 0,
"numReads": 0,
@@ -153,7 +148,6 @@ test('Returns parsed state file', async () => {
"fsUsed": null,
"id": "KINGSTON_SA2000M8250G_50026B7282669D9E",
"idx": 31,
"isSpinning": true,
"name": "cache2",
"numErrors": 0,
"numReads": 0,
@@ -178,7 +172,6 @@ test('Returns parsed state file', async () => {
"fsUsed": 851325,
"id": "Cruzer",
"idx": 32,
"isSpinning": true,
"name": "flash",
"numErrors": 0,
"numReads": 0,

View File

@@ -2,7 +2,7 @@ import { join } from 'path';
import type { JSONWebKeySet } from 'jose';
import { ENABLE_NEXT_DOCKER_RELEASE, PORT } from '@app/environment.js';
import { PORT } from '@app/environment.js';
export const getInternalApiAddress = (isHttp = true, nginxPort = 80) => {
const envPort = PORT;
@@ -79,14 +79,3 @@ export const KEYSERVER_VALIDATION_ENDPOINT = 'https://keys.lime-technology.com/v
/** Set the max retries for the GraphQL Client */
export const MAX_RETRIES_FOR_LINEAR_BACKOFF = 100;
/**
* Feature flags are used to conditionally enable or disable functionality in the Unraid API.
*
* Keys are human-readable feature flag names; they are used to construct error messages.
*
* Values are boolean/truthy values.
*/
export const FeatureFlags = Object.freeze({
ENABLE_NEXT_DOCKER_RELEASE,
});
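FeatureFlags above freezes boolean flags keyed by human-readable names so error messages can name the flag. As an illustrative sketch (the helper below is hypothetical and not part of this changeset), a consumer could gate behaviour like this:

import { FeatureFlags } from '@app/consts.js';

// Hypothetical helper: throw a descriptive error when a flag is disabled.
function assertFeatureEnabled(flag: keyof typeof FeatureFlags): void {
    if (!FeatureFlags[flag]) {
        throw new Error(`Feature flag is disabled: ${flag}`);
    }
}

assertFeatureEnabled('ENABLE_NEXT_DOCKER_RELEASE');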

View File

@@ -16,22 +16,11 @@ export const getKeyFile = async function (appStore: RootState = store.getState()
const keyFileName = basename(emhttp.var?.regFile);
const registrationKeyFilePath = join(paths['keyfile-base'], keyFileName);
try {
const keyFile = await readFile(registrationKeyFilePath, 'binary');
return Buffer.from(keyFile, 'binary')
.toString('base64')
.trim()
.replace(/\+/g, '-')
.replace(/\//g, '_')
.replace(/=/g, '');
} catch (error) {
// Handle ENOENT error when Pro.key file doesn't exist
if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
// Return empty string when key file is missing (ENOKEYFILE state)
return '';
}
// Re-throw other errors
throw error;
}
const keyFile = await readFile(registrationKeyFilePath, 'binary');
return Buffer.from(keyFile, 'binary')
.toString('base64')
.trim()
.replace(/\+/g, '-')
.replace(/\//g, '_')
.replace(/=/g, '');
};

View File

@@ -2,6 +2,7 @@
// Non-function exports from this module are loaded into the NestJS Config at runtime.
import { readFileSync } from 'node:fs';
import { homedir } from 'node:os';
import { join } from 'node:path';
import { fileURLToPath } from 'node:url';
@@ -98,7 +99,7 @@ export const MOTHERSHIP_GRAPHQL_LINK = process.env.MOTHERSHIP_GRAPHQL_LINK
? 'https://staging.mothership.unraid.net/ws'
: 'https://mothership.unraid.net/ws';
export const PM2_HOME = process.env.PM2_HOME ?? '/var/log/.pm2';
export const PM2_HOME = process.env.PM2_HOME ?? join(homedir(), '.pm2');
export const PM2_PATH = join(import.meta.dirname, '../../', 'node_modules', 'pm2', 'bin', 'pm2');
export const ECOSYSTEM_PATH = join(import.meta.dirname, '../../', 'ecosystem.config.json');
export const PATHS_LOGS_DIR =
@@ -110,6 +111,3 @@ export const PATHS_CONFIG_MODULES =
export const PATHS_LOCAL_SESSION_FILE =
process.env.PATHS_LOCAL_SESSION_FILE ?? '/var/run/unraid-api/local-session';
/** feature flag for the upcoming docker release */
export const ENABLE_NEXT_DOCKER_RELEASE = process.env.ENABLE_NEXT_DOCKER_RELEASE === 'true';

View File

@@ -36,7 +36,6 @@ export type IniSlot = {
size: string;
sizeSb: string;
slots: string;
spundown: string;
status: SlotStatus;
temp: string;
type: SlotType;
@@ -83,7 +82,6 @@ export const parse: StateFileToIniParserMap['disks'] = (disksIni) =>
fsType: slot.fsType ?? null,
format: slot.format === '-' ? null : slot.format,
transport: slot.transport ?? null,
isSpinning: slot.spundown ? slot.spundown === '0' : null,
};
// @TODO Zod Parse This
return result;

View File

@@ -14,7 +14,6 @@ import { AuthModule } from '@app/unraid-api/auth/auth.module.js';
import { AuthenticationGuard } from '@app/unraid-api/auth/authentication.guard.js';
import { LegacyConfigModule } from '@app/unraid-api/config/legacy-config.module.js';
import { CronModule } from '@app/unraid-api/cron/cron.module.js';
import { JobModule } from '@app/unraid-api/cron/job.module.js';
import { GraphModule } from '@app/unraid-api/graph/graph.module.js';
import { GlobalDepsModule } from '@app/unraid-api/plugin/global-deps.module.js';
import { RestModule } from '@app/unraid-api/rest/rest.module.js';
@@ -25,7 +24,7 @@ import { UnraidFileModifierModule } from '@app/unraid-api/unraid-file-modifier/u
GlobalDepsModule,
LegacyConfigModule,
PubSubModule,
JobModule,
ScheduleModule.forRoot(),
LoggerModule.forRoot({
pinoHttp: {
logger: apiLogger,

View File

@@ -1,111 +0,0 @@
import { Test, TestingModule } from '@nestjs/testing';
import { afterEach, beforeEach, describe, expect, it, MockInstance, vi } from 'vitest';
import { LogService } from '@app/unraid-api/cli/log.service.js';
import { VersionCommand } from '@app/unraid-api/cli/version.command.js';
let API_VERSION_MOCK = '4.18.2+build123';
vi.mock('@app/environment.js', async (importOriginal) => {
const actual = (await importOriginal()) as any;
return {
...actual,
get API_VERSION() {
return API_VERSION_MOCK;
},
};
});
describe('VersionCommand', () => {
let command: VersionCommand;
let logService: LogService;
let consoleLogSpy: MockInstance<typeof console.log>;
beforeEach(async () => {
API_VERSION_MOCK = '4.18.2+build123'; // Reset to default before each test
consoleLogSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
const module: TestingModule = await Test.createTestingModule({
providers: [
VersionCommand,
{
provide: LogService,
useValue: {
info: vi.fn(),
},
},
],
}).compile();
command = module.get<VersionCommand>(VersionCommand);
logService = module.get<LogService>(LogService);
});
afterEach(() => {
vi.restoreAllMocks();
});
describe('run', () => {
it('should output version with logger when no options provided', async () => {
await command.run([]);
expect(logService.info).toHaveBeenCalledWith('Unraid API v4.18.2+build123');
expect(consoleLogSpy).not.toHaveBeenCalled();
});
it('should output version with logger when json option is false', async () => {
await command.run([], { json: false });
expect(logService.info).toHaveBeenCalledWith('Unraid API v4.18.2+build123');
expect(consoleLogSpy).not.toHaveBeenCalled();
});
it('should output JSON when json option is true', async () => {
await command.run([], { json: true });
expect(logService.info).not.toHaveBeenCalled();
expect(consoleLogSpy).toHaveBeenCalledWith(
JSON.stringify({
version: '4.18.2',
build: 'build123',
combined: '4.18.2+build123',
})
);
});
it('should handle version without build info', async () => {
API_VERSION_MOCK = '4.18.2'; // Set version without build info
const module: TestingModule = await Test.createTestingModule({
providers: [
VersionCommand,
{
provide: LogService,
useValue: {
info: vi.fn(),
},
},
],
}).compile();
const commandWithoutBuild = module.get<VersionCommand>(VersionCommand);
await commandWithoutBuild.run([], { json: true });
expect(consoleLogSpy).toHaveBeenCalledWith(
JSON.stringify({
version: '4.18.2',
build: undefined,
combined: '4.18.2',
})
);
});
});
describe('parseJson', () => {
it('should return true', () => {
expect(command.parseJson()).toBe(true);
});
});
});

View File

@@ -241,8 +241,6 @@ export type ArrayDisk = Node & {
id: Scalars['PrefixedID']['output'];
/** Array slot number. Parity1 is always 0 and Parity2 is always 29. Array slots are 1-28. Cache slots are 30-53. Flash is 54. */
idx: Scalars['Int']['output'];
/** Whether the disk is currently spinning */
isSpinning?: Maybe<Scalars['Boolean']['output']>;
name?: Maybe<Scalars['String']['output']>;
/** Number of unrecoverable errors reported by the device I/O drivers. Missing data due to unrecoverable array read errors is filled in on-the-fly using parity reconstruct (and we attempt to write this data back to the sector(s) which failed). Any unrecoverable write error results in disabling the disk. */
numErrors?: Maybe<Scalars['BigInt']['output']>;
@@ -450,6 +448,20 @@ export enum ConfigErrorState {
WITHDRAWN = 'WITHDRAWN'
}
export type ConfigFile = {
__typename?: 'ConfigFile';
content: Scalars['String']['output'];
name: Scalars['String']['output'];
path: Scalars['String']['output'];
/** Human-readable file size (e.g., "1.5 KB", "2.3 MB") */
sizeReadable: Scalars['String']['output'];
};
export type ConfigFilesResponse = {
__typename?: 'ConfigFilesResponse';
files: Array<ConfigFile>;
};
export type Connect = Node & {
__typename?: 'Connect';
/** The status of dynamic remote access */
@@ -541,16 +553,12 @@ export type CoreVersions = {
/** CPU load for a single core */
export type CpuLoad = {
__typename?: 'CpuLoad';
/** The percentage of time the CPU spent running virtual machines (guest). */
percentGuest: Scalars['Float']['output'];
/** The percentage of time the CPU was idle. */
percentIdle: Scalars['Float']['output'];
/** The percentage of time the CPU spent servicing hardware interrupts. */
percentIrq: Scalars['Float']['output'];
/** The percentage of time the CPU spent on low-priority (niced) user space processes. */
percentNice: Scalars['Float']['output'];
/** The percentage of CPU time stolen by the hypervisor. */
percentSteal: Scalars['Float']['output'];
/** The percentage of time the CPU spent in kernel space. */
percentSystem: Scalars['Float']['output'];
/** The total CPU load on a single core, in percent. */
@@ -609,8 +617,6 @@ export type Disk = Node & {
id: Scalars['PrefixedID']['output'];
/** The interface type of the disk */
interfaceType: DiskInterfaceType;
/** Whether the disk is spinning or not */
isSpinning: Scalars['Boolean']['output'];
/** The model name of the disk */
name: Scalars['String']['output'];
/** The partitions on the disk */
@@ -678,7 +684,6 @@ export enum DiskSmartStatus {
export type Docker = Node & {
__typename?: 'Docker';
containerUpdateStatuses: Array<ExplicitStatusItem>;
containers: Array<DockerContainer>;
id: Scalars['PrefixedID']['output'];
networks: Array<DockerNetwork>;
@@ -704,8 +709,6 @@ export type DockerContainer = Node & {
id: Scalars['PrefixedID']['output'];
image: Scalars['String']['output'];
imageId: Scalars['String']['output'];
isRebuildReady?: Maybe<Scalars['Boolean']['output']>;
isUpdateAvailable?: Maybe<Scalars['Boolean']['output']>;
labels?: Maybe<Scalars['JSON']['output']>;
mounts?: Maybe<Array<Scalars['JSON']['output']>>;
names: Array<Scalars['String']['output']>;
@@ -777,12 +780,6 @@ export type EnableDynamicRemoteAccessInput = {
url: AccessUrlInput;
};
export type ExplicitStatusItem = {
__typename?: 'ExplicitStatusItem';
name: Scalars['String']['output'];
updateStatus: UpdateStatus;
};
export type Flash = Node & {
__typename?: 'Flash';
guid: Scalars['String']['output'];
@@ -1238,7 +1235,6 @@ export type Mutation = {
rclone: RCloneMutations;
/** Reads each notification to recompute & update the overview. */
recalculateOverview: NotificationOverview;
refreshDockerDigests: Scalars['Boolean']['output'];
/** Remove one or more plugins from the API. Returns false if restart was triggered automatically, true if manual restart is required. */
removePlugin: Scalars['Boolean']['output'];
setDockerFolderChildren: ResolvedOrganizerV1;
@@ -1649,6 +1645,7 @@ export type PublicPartnerInfo = {
export type Query = {
__typename?: 'Query';
allConfigFiles: ConfigFilesResponse;
apiKey?: Maybe<ApiKey>;
/** All possible permissions for API keys */
apiKeyPossiblePermissions: Array<Permission>;
@@ -1658,6 +1655,7 @@ export type Query = {
array: UnraidArray;
cloud: Cloud;
config: Config;
configFile?: Maybe<ConfigFile>;
connect: Connect;
customization?: Maybe<Customization>;
disk: Disk;
@@ -1721,6 +1719,11 @@ export type QueryApiKeyArgs = {
};
export type QueryConfigFileArgs = {
name: Scalars['String']['input'];
};
export type QueryDiskArgs = {
id: Scalars['PrefixedID']['input'];
};
@@ -2274,14 +2277,6 @@ export type UpdateSettingsResponse = {
warnings?: Maybe<Array<Scalars['String']['output']>>;
};
/** Update status of a container. */
export enum UpdateStatus {
REBUILD_READY = 'REBUILD_READY',
UNKNOWN = 'UNKNOWN',
UPDATE_AVAILABLE = 'UPDATE_AVAILABLE',
UP_TO_DATE = 'UP_TO_DATE'
}
export type Uptime = {
__typename?: 'Uptime';
timestamp?: Maybe<Scalars['String']['output']>;

View File

@@ -1,76 +0,0 @@
import * as fs from 'node:fs/promises';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { LogService } from '@app/unraid-api/cli/log.service.js';
import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';
vi.mock('node:fs/promises');
vi.mock('execa');
vi.mock('@app/core/utils/files/file-exists.js', () => ({
fileExists: vi.fn().mockResolvedValue(false),
}));
vi.mock('@app/environment.js', () => ({
PATHS_LOGS_DIR: '/var/log/unraid-api',
PM2_HOME: '/var/log/.pm2',
PM2_PATH: '/path/to/pm2',
ECOSYSTEM_PATH: '/path/to/ecosystem.config.json',
SUPPRESS_LOGS: false,
LOG_LEVEL: 'info',
}));
describe('PM2Service', () => {
let pm2Service: PM2Service;
let logService: LogService;
const mockMkdir = vi.mocked(fs.mkdir);
beforeEach(() => {
vi.clearAllMocks();
logService = {
trace: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
log: vi.fn(),
info: vi.fn(),
debug: vi.fn(),
} as unknown as LogService;
pm2Service = new PM2Service(logService);
});
afterEach(() => {
vi.restoreAllMocks();
});
describe('ensurePm2Dependencies', () => {
it('should create logs directory and log that PM2 will handle its own directory', async () => {
mockMkdir.mockResolvedValue(undefined);
await pm2Service.ensurePm2Dependencies();
expect(mockMkdir).toHaveBeenCalledWith('/var/log/unraid-api', { recursive: true });
expect(mockMkdir).toHaveBeenCalledTimes(1); // Only logs directory, not PM2_HOME
expect(logService.trace).toHaveBeenCalledWith(
'PM2_HOME will be created at /var/log/.pm2 when PM2 daemon starts'
);
});
it('should log error but not throw when logs directory creation fails', async () => {
mockMkdir.mockRejectedValue(new Error('Disk full'));
await expect(pm2Service.ensurePm2Dependencies()).resolves.not.toThrow();
expect(logService.error).toHaveBeenCalledWith(
expect.stringContaining('Failed to fully ensure PM2 dependencies: Disk full')
);
});
it('should handle mkdir with recursive flag for nested logs path', async () => {
mockMkdir.mockResolvedValue(undefined);
await pm2Service.ensurePm2Dependencies();
expect(mockMkdir).toHaveBeenCalledWith('/var/log/unraid-api', { recursive: true });
expect(mockMkdir).toHaveBeenCalledTimes(1);
});
});
});

View File

@@ -42,22 +42,8 @@ export class PM2Service {
async run(context: CmdContext, ...args: string[]) {
const { tag, raw, ...execOptions } = context;
// Default to true to match execa's default behavior
execOptions.extendEnv ??= true;
execOptions.extendEnv ??= false;
execOptions.shell ??= 'bash';
// Ensure /usr/local/bin is in PATH for Node.js
const currentPath = execOptions.env?.PATH || process.env.PATH || '/usr/bin:/bin:/usr/sbin:/sbin';
const needsPathUpdate = !currentPath.includes('/usr/local/bin');
const finalPath = needsPathUpdate ? `/usr/local/bin:${currentPath}` : currentPath;
// Always ensure PM2_HOME is set in the environment for every PM2 command
execOptions.env = {
...execOptions.env,
PM2_HOME,
...(needsPathUpdate && { PATH: finalPath }),
};
const runCommand = () => execa(PM2_PATH, [...args], execOptions satisfies Options);
if (raw) {
return runCommand();
@@ -114,20 +100,8 @@ export class PM2Service {
/**
* Ensures that the dependencies necessary for PM2 to start and operate are present.
* Creates the PM2_HOME directory with proper permissions if it doesn't exist.
*/
async ensurePm2Dependencies() {
try {
// Create logs directory
await mkdir(PATHS_LOGS_DIR, { recursive: true });
// PM2 automatically creates and manages its home directory when the daemon starts
this.logger.trace(`PM2_HOME will be created at ${PM2_HOME} when PM2 daemon starts`);
} catch (error) {
// Log error but don't throw - let PM2 fail with its own error messages if the setup is incomplete
this.logger.error(
`Failed to fully ensure PM2 dependencies: ${error instanceof Error ? error.message : error}. PM2 may encounter issues during operation.`
);
}
await mkdir(PATHS_LOGS_DIR, { recursive: true });
}
}
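The run() wrapper above forwards PM2 invocations through execa against the bundled PM2 binary, defaulting extendEnv and shell, and in the longer variant also injecting PM2_HOME and a PATH that contains /usr/local/bin. A rough usage sketch; the tag and command are invented for illustration and assume an async context with a PM2Service instance:

// raw: true returns the plain execa result, so stdout is available directly.
const result = await pm2Service.run({ tag: 'pm2-status', raw: true }, 'status');
console.log(result.stdout);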

View File

@@ -1,37 +1,14 @@
import { Command, CommandRunner, Option } from 'nest-commander';
import { Command, CommandRunner } from 'nest-commander';
import { API_VERSION } from '@app/environment.js';
import { LogService } from '@app/unraid-api/cli/log.service.js';
interface VersionOptions {
json?: boolean;
}
@Command({ name: 'version', description: 'Display API version information' })
@Command({ name: 'version' })
export class VersionCommand extends CommandRunner {
constructor(private readonly logger: LogService) {
super();
}
@Option({
flags: '-j, --json',
description: 'Output version information as JSON',
})
parseJson(): boolean {
return true;
}
async run(passedParam: string[], options?: VersionOptions): Promise<void> {
if (options?.json) {
const [baseVersion, buildInfo] = API_VERSION.split('+');
const versionInfo = {
version: baseVersion || API_VERSION,
build: buildInfo || undefined,
combined: API_VERSION,
};
console.log(JSON.stringify(versionInfo));
} else {
this.logger.info(`Unraid API v${API_VERSION}`);
}
async run(): Promise<void> {
this.logger.info(`Unraid API v${API_VERSION}`);
}
}

View File

@@ -1,11 +1,11 @@
import { Module } from '@nestjs/common';
import { ScheduleModule } from '@nestjs/schedule';
import { JobModule } from '@app/unraid-api/cron/job.module.js';
import { LogRotateService } from '@app/unraid-api/cron/log-rotate.service.js';
import { WriteFlashFileService } from '@app/unraid-api/cron/write-flash-file.service.js';
@Module({
imports: [JobModule],
imports: [],
providers: [WriteFlashFileService, LogRotateService],
})
export class CronModule {}

View File

@@ -1,13 +0,0 @@
import { Module } from '@nestjs/common';
import { ScheduleModule } from '@nestjs/schedule';
/**
* Sets up common dependencies for initializing jobs (e.g. scheduler registry, cron jobs).
*
* Simplifies testing setup & application dependency tree by ensuring `forRoot` is called only once.
*/
@Module({
imports: [ScheduleModule.forRoot()],
exports: [ScheduleModule],
})
export class JobModule {}
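Per the docstring, JobModule's only job is to call ScheduleModule.forRoot() once and re-export ScheduleModule, so feature modules import JobModule instead of calling forRoot themselves, as CronModule did in the hunk above. A minimal sketch of that pattern; the feature module name is hypothetical:

import { Module } from '@nestjs/common';

import { JobModule } from '@app/unraid-api/cron/job.module.js';
import { LogRotateService } from '@app/unraid-api/cron/log-rotate.service.js';

// Hypothetical feature module: gains cron/SchedulerRegistry support by importing
// JobModule rather than invoking ScheduleModule.forRoot() a second time.
@Module({
    imports: [JobModule],
    providers: [LogRotateService],
})
export class ExampleCronFeatureModule {}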

View File

@@ -1,172 +0,0 @@
import { Reflector } from '@nestjs/core';
import { Field, Mutation, ObjectType, Query, ResolveField, Resolver } from '@nestjs/graphql';
import { beforeEach, describe, expect, it, vi } from 'vitest';
import { OMIT_IF_METADATA_KEY, OmitIf } from '@app/unraid-api/decorators/omit-if.decorator.js';
describe('OmitIf Decorator', () => {
let reflector: Reflector;
beforeEach(() => {
reflector = new Reflector();
});
describe('OmitIf', () => {
it('should set metadata when condition is true', () => {
class TestResolver {
@OmitIf(true)
testMethod() {
return 'test';
}
}
const instance = new TestResolver();
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
expect(metadata).toBe(true);
});
it('should not set metadata when condition is false', () => {
class TestResolver {
@OmitIf(false)
testMethod() {
return 'test';
}
}
const instance = new TestResolver();
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
expect(metadata).toBeUndefined();
});
it('should evaluate function conditions', () => {
const mockCondition = vi.fn(() => true);
class TestResolver {
@OmitIf(mockCondition)
testMethod() {
return 'test';
}
}
expect(mockCondition).toHaveBeenCalledOnce();
const instance = new TestResolver();
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
expect(metadata).toBe(true);
});
it('should evaluate function conditions that return false', () => {
const mockCondition = vi.fn(() => false);
class TestResolver {
@OmitIf(mockCondition)
testMethod() {
return 'test';
}
}
expect(mockCondition).toHaveBeenCalledOnce();
const instance = new TestResolver();
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
expect(metadata).toBeUndefined();
});
it('should work with environment variables', () => {
const originalEnv = process.env.NODE_ENV;
process.env.NODE_ENV = 'production';
class TestResolver {
@OmitIf(process.env.NODE_ENV === 'production')
testMethod() {
return 'test';
}
}
const instance = new TestResolver();
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testMethod);
expect(metadata).toBe(true);
process.env.NODE_ENV = originalEnv;
});
});
describe('Integration with NestJS GraphQL decorators', () => {
it('should work with @Query decorator', () => {
@Resolver()
class TestResolver {
@OmitIf(true)
@Query(() => String)
omittedQuery() {
return 'test';
}
@OmitIf(false)
@Query(() => String)
includedQuery() {
return 'test';
}
}
const instance = new TestResolver();
const omittedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.omittedQuery);
const includedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.includedQuery);
expect(omittedMetadata).toBe(true);
expect(includedMetadata).toBeUndefined();
});
it('should work with @Mutation decorator', () => {
@Resolver()
class TestResolver {
@OmitIf(true)
@Mutation(() => String)
omittedMutation() {
return 'test';
}
@OmitIf(false)
@Mutation(() => String)
includedMutation() {
return 'test';
}
}
const instance = new TestResolver();
const omittedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.omittedMutation);
const includedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.includedMutation);
expect(omittedMetadata).toBe(true);
expect(includedMetadata).toBeUndefined();
});
it('should work with @ResolveField decorator', () => {
@ObjectType()
class TestType {
@Field()
id: string = '';
}
@Resolver(() => TestType)
class TestResolver {
@OmitIf(true)
@ResolveField(() => String)
omittedField() {
return 'test';
}
@OmitIf(false)
@ResolveField(() => String)
includedField() {
return 'test';
}
}
const instance = new TestResolver();
const omittedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.omittedField);
const includedMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.includedField);
expect(omittedMetadata).toBe(true);
expect(includedMetadata).toBeUndefined();
});
});
});

View File

@@ -1,80 +0,0 @@
import { SetMetadata } from '@nestjs/common';
import { Extensions } from '@nestjs/graphql';
import { MapperKind, mapSchema } from '@graphql-tools/utils';
import { GraphQLFieldConfig, GraphQLSchema } from 'graphql';
export const OMIT_IF_METADATA_KEY = 'omitIf';
/**
* Decorator that conditionally omits a GraphQL field/query/mutation based on a condition.
* The field will only be omitted from the schema when the condition evaluates to true.
*
* @param condition - If the condition evaluates to true, the field will be omitted from the schema
* @returns A decorator that wraps the target field/query/mutation
*
* @example
* ```typescript
* @OmitIf(process.env.NODE_ENV === 'production')
* @Query(() => String)
* async debugQuery() {
* return 'This query is omitted in production';
* }
* ```
*/
export function OmitIf(condition: boolean | (() => boolean)): MethodDecorator & PropertyDecorator {
const shouldOmit = typeof condition === 'function' ? condition() : condition;
return (target: object, propertyKey?: string | symbol, descriptor?: PropertyDescriptor) => {
if (shouldOmit) {
SetMetadata(OMIT_IF_METADATA_KEY, true)(
target,
propertyKey as string,
descriptor as PropertyDescriptor
);
Extensions({ omitIf: true })(
target,
propertyKey as string,
descriptor as PropertyDescriptor
);
}
return descriptor;
};
}
/**
* Schema transformer that omits fields/queries/mutations based on the OmitIf decorator.
* @param schema - The GraphQL schema to transform
* @returns The transformed GraphQL schema
*/
export function omitIfSchemaTransformer(schema: GraphQLSchema): GraphQLSchema {
return mapSchema(schema, {
[MapperKind.OBJECT_FIELD]: (
fieldConfig: GraphQLFieldConfig<any, any>,
fieldName: string,
typeName: string
) => {
const extensions = fieldConfig.extensions || {};
if (extensions.omitIf === true) {
return null;
}
return fieldConfig;
},
[MapperKind.ROOT_FIELD]: (
fieldConfig: GraphQLFieldConfig<any, any>,
fieldName: string,
typeName: string
) => {
const extensions = fieldConfig.extensions || {};
if (extensions.omitIf === true) {
return null;
}
return fieldConfig;
},
});
}

View File

@@ -1,317 +0,0 @@
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-nocheck
// fixme: types don't sync with mocks, and there's no override to simplify testing.
import { Reflector } from '@nestjs/core';
import { Mutation, Query, ResolveField, Resolver } from '@nestjs/graphql';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { OMIT_IF_METADATA_KEY } from '@app/unraid-api/decorators/omit-if.decorator.js';
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
// Mock the FeatureFlags
vi.mock('@app/consts.js', () => ({
FeatureFlags: Object.freeze({
ENABLE_NEXT_DOCKER_RELEASE: false,
ENABLE_EXPERIMENTAL_FEATURE: true,
ENABLE_DEBUG_MODE: false,
ENABLE_BETA_FEATURES: true,
}),
}));
describe('UseFeatureFlag Decorator', () => {
let reflector: Reflector;
beforeEach(() => {
reflector = new Reflector();
});
afterEach(() => {
vi.clearAllMocks();
});
describe('Basic functionality', () => {
it('should omit field when feature flag is false', () => {
@Resolver()
class TestResolver {
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@Query(() => String)
testQuery() {
return 'test';
}
}
const instance = new TestResolver();
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testQuery);
expect(metadata).toBe(true); // Should be omitted because flag is false
});
it('should include field when feature flag is true', () => {
@Resolver()
class TestResolver {
@UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
@Query(() => String)
testQuery() {
return 'test';
}
}
const instance = new TestResolver();
const metadata = reflector.get(OMIT_IF_METADATA_KEY, instance.testQuery);
expect(metadata).toBeUndefined(); // Should not be omitted because flag is true
});
});
describe('With different decorator types', () => {
it('should work with @Query decorator', () => {
@Resolver()
class TestResolver {
@UseFeatureFlag('ENABLE_DEBUG_MODE')
@Query(() => String)
debugQuery() {
return 'debug';
}
@UseFeatureFlag('ENABLE_BETA_FEATURES')
@Query(() => String)
betaQuery() {
return 'beta';
}
}
const instance = new TestResolver();
const debugMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.debugQuery);
const betaMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.betaQuery);
expect(debugMetadata).toBe(true); // ENABLE_DEBUG_MODE is false
expect(betaMetadata).toBeUndefined(); // ENABLE_BETA_FEATURES is true
});
it('should work with @Mutation decorator', () => {
@Resolver()
class TestResolver {
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@Mutation(() => String)
dockerMutation() {
return 'docker';
}
@UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
@Mutation(() => String)
experimentalMutation() {
return 'experimental';
}
}
const instance = new TestResolver();
const dockerMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.dockerMutation);
const experimentalMetadata = reflector.get(
OMIT_IF_METADATA_KEY,
instance.experimentalMutation
);
expect(dockerMetadata).toBe(true); // ENABLE_NEXT_DOCKER_RELEASE is false
expect(experimentalMetadata).toBeUndefined(); // ENABLE_EXPERIMENTAL_FEATURE is true
});
it('should work with @ResolveField decorator', () => {
@Resolver()
class TestResolver {
@UseFeatureFlag('ENABLE_DEBUG_MODE')
@ResolveField(() => String)
debugField() {
return 'debug';
}
@UseFeatureFlag('ENABLE_BETA_FEATURES')
@ResolveField(() => String)
betaField() {
return 'beta';
}
}
const instance = new TestResolver();
const debugMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.debugField);
const betaMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.betaField);
expect(debugMetadata).toBe(true); // ENABLE_DEBUG_MODE is false
expect(betaMetadata).toBeUndefined(); // ENABLE_BETA_FEATURES is true
});
});
describe('Multiple decorators on same class', () => {
it('should handle multiple feature flags independently', () => {
@Resolver()
class TestResolver {
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@Query(() => String)
dockerQuery() {
return 'docker';
}
@UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
@Query(() => String)
experimentalQuery() {
return 'experimental';
}
@UseFeatureFlag('ENABLE_DEBUG_MODE')
@Query(() => String)
debugQuery() {
return 'debug';
}
@UseFeatureFlag('ENABLE_BETA_FEATURES')
@Query(() => String)
betaQuery() {
return 'beta';
}
}
const instance = new TestResolver();
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.dockerQuery)).toBe(true);
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.experimentalQuery)).toBeUndefined();
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.debugQuery)).toBe(true);
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.betaQuery)).toBeUndefined();
});
});
describe('Type safety', () => {
it('should only accept valid feature flag keys', () => {
// This test verifies TypeScript compile-time type safety
// The following would cause a TypeScript error if uncommented:
// @UseFeatureFlag('INVALID_FLAG')
@Resolver()
class TestResolver {
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@Query(() => String)
validQuery() {
return 'valid';
}
}
const instance = new TestResolver();
expect(instance.validQuery).toBeDefined();
});
});
describe('Integration scenarios', () => {
it('should work correctly with other decorators', () => {
const customDecorator = (
target: any,
propertyKey: string | symbol,
descriptor: PropertyDescriptor
) => {
Reflect.defineMetadata('custom', true, target, propertyKey);
return descriptor;
};
@Resolver()
class TestResolver {
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@customDecorator
@Query(() => String)
multiDecoratorQuery() {
return 'multi';
}
}
const instance = new TestResolver();
const omitMetadata = reflector.get(OMIT_IF_METADATA_KEY, instance.multiDecoratorQuery);
const customMetadata = Reflect.getMetadata('custom', instance, 'multiDecoratorQuery');
expect(omitMetadata).toBe(true);
expect(customMetadata).toBe(true);
});
it('should maintain correct decorator order', () => {
const orderTracker: string[] = [];
const trackingDecorator = (name: string) => {
return (target: any, propertyKey: string | symbol, descriptor: PropertyDescriptor) => {
orderTracker.push(name);
return descriptor;
};
};
@Resolver()
class TestResolver {
@trackingDecorator('first')
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@trackingDecorator('last')
@Query(() => String)
orderedQuery() {
return 'ordered';
}
}
// Decorators are applied bottom-up
expect(orderTracker).toEqual(['last', 'first']);
});
});
describe('Real-world usage patterns', () => {
it('should work with Docker resolver pattern', () => {
@Resolver()
class DockerResolver {
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@Mutation(() => String)
async createDockerFolder(name: string) {
return `Created folder: ${name}`;
}
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@Mutation(() => String)
async deleteDockerEntries(entryIds: string[]) {
return `Deleted entries: ${entryIds.join(', ')}`;
}
@Query(() => String)
async getDockerInfo() {
return 'Docker info';
}
}
const instance = new DockerResolver();
// Feature flag is false, so these should be omitted
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.createDockerFolder)).toBe(true);
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.deleteDockerEntries)).toBe(true);
// No feature flag, so this should not be omitted
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.getDockerInfo)).toBeUndefined();
});
it('should handle mixed feature flags in same resolver', () => {
@Resolver()
class MixedResolver {
@UseFeatureFlag('ENABLE_EXPERIMENTAL_FEATURE')
@Query(() => String)
experimentalQuery() {
return 'experimental';
}
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@Query(() => String)
dockerQuery() {
return 'docker';
}
@UseFeatureFlag('ENABLE_BETA_FEATURES')
@Mutation(() => String)
betaMutation() {
return 'beta';
}
}
const instance = new MixedResolver();
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.experimentalQuery)).toBeUndefined();
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.dockerQuery)).toBe(true);
expect(reflector.get(OMIT_IF_METADATA_KEY, instance.betaMutation)).toBeUndefined();
});
});
});

View File

@@ -1,22 +0,0 @@
import { FeatureFlags } from '@app/consts.js';
import { OmitIf } from '@app/unraid-api/decorators/omit-if.decorator.js';
/**
* Decorator that conditionally includes a GraphQL field/query/mutation based on a feature flag.
* The field will only be included in the schema when the feature flag is enabled.
*
* @param flagKey - The key of the feature flag in FeatureFlags
* @returns A decorator that wraps OmitIf
*
* @example
* ```typescript
* @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
* @Mutation(() => String)
* async experimentalMutation() {
* return 'This mutation is only available when ENABLE_NEXT_DOCKER_RELEASE is true';
* }
* ```
*/
export function UseFeatureFlag(flagKey: keyof typeof FeatureFlags): MethodDecorator & PropertyDecorator {
return OmitIf(!FeatureFlags[flagKey]);
}

View File

@@ -12,7 +12,6 @@ import { NoUnusedVariablesRule } from 'graphql';
import { ENVIRONMENT } from '@app/environment.js';
import { ApiConfigModule } from '@app/unraid-api/config/api-config.module.js';
import { omitIfSchemaTransformer } from '@app/unraid-api/decorators/omit-if.decorator.js';
// Import enum registrations to ensure they're registered with GraphQL
import '@app/unraid-api/graph/auth/auth-action.enum.js';
@@ -65,12 +64,7 @@ import { PluginModule } from '@app/unraid-api/plugin/plugin.module.js';
},
// Only add transform when not in test environment to avoid GraphQL version conflicts
transformSchema:
process.env.NODE_ENV === 'test'
? undefined
: (schema) => {
const schemaWithPermissions = usePermissionsSchemaTransformer(schema);
return omitIfSchemaTransformer(schemaWithPermissions);
},
process.env.NODE_ENV === 'test' ? undefined : usePermissionsSchemaTransformer,
validationRules: [NoUnusedVariablesRule],
};
},

View File

@@ -126,9 +126,6 @@ export class ArrayDisk extends Node {
@Field(() => ArrayDiskFsColor, { nullable: true })
color?: ArrayDiskFsColor | null;
@Field(() => Boolean, { nullable: true, description: 'Whether the disk is currently spinning' })
isSpinning?: boolean | null;
}
@ObjectType({

View File

@@ -3,15 +3,7 @@ import { Field, ObjectType, registerEnumType } from '@nestjs/graphql';
import { Node } from '@unraid/shared/graphql.model.js';
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';
import { Type } from 'class-transformer';
import {
IsArray,
IsBoolean,
IsEnum,
IsNumber,
IsOptional,
IsString,
ValidateNested,
} from 'class-validator';
import { IsArray, IsEnum, IsNumber, IsOptional, IsString, ValidateNested } from 'class-validator';
export enum DiskFsType {
XFS = 'XFS',
@@ -144,8 +136,4 @@ export class Disk extends Node {
@ValidateNested({ each: true })
@Type(() => DiskPartition)
partitions!: DiskPartition[];
@Field(() => Boolean, { description: 'Whether the disk is spinning or not' })
@IsBoolean()
isSpinning!: boolean;
}

View File

@@ -66,7 +66,6 @@ describe('DisksResolver', () => {
smartStatus: DiskSmartStatus.OK,
temperature: -1,
partitions: [],
isSpinning: false,
},
];
mockDisksService.getDisks.mockResolvedValue(mockResult);
@@ -93,7 +92,6 @@ describe('DisksResolver', () => {
const mockDisk: Disk = {
id: 'SERIAL123',
device: '/dev/sda',
isSpinning: false,
type: 'SSD',
name: 'Samsung SSD 860 EVO 1TB',
vendor: 'Samsung',

View File

@@ -33,9 +33,4 @@ export class DisksResolver {
public async temperature(@Parent() disk: Disk) {
return this.disksService.getTemperature(disk.device);
}
@ResolveField(() => Boolean)
public async isSpinning(@Parent() disk: Disk) {
return disk.isSpinning;
}
}

View File

@@ -1,17 +1,11 @@
import { ConfigService } from '@nestjs/config';
import { Test, TestingModule } from '@nestjs/testing';
import type { Systeminformation } from 'systeminformation';
import { execa } from 'execa';
import { blockDevices, diskLayout } from 'systeminformation';
// Vitest imports
import { beforeEach, describe, expect, it, vi } from 'vitest';
import { beforeEach, describe, expect, it, Mock, MockedFunction, vi } from 'vitest';
import {
ArrayDisk,
ArrayDiskStatus,
ArrayDiskType,
} from '@app/unraid-api/graph/resolvers/array/array.model.js';
import {
Disk,
DiskFsType,
@@ -39,86 +33,6 @@ const mockBatchProcess = batchProcess as any;
describe('DisksService', () => {
let service: DisksService;
let configService: ConfigService;
// Mock ArrayDisk data from state
const mockArrayDisks: ArrayDisk[] = [
{
id: 'S4ENNF0N123456',
device: 'sda',
name: 'cache',
size: 512110190592,
idx: 30,
type: ArrayDiskType.CACHE,
status: ArrayDiskStatus.DISK_OK,
isSpinning: null, // NVMe/SSD doesn't spin
rotational: false,
exportable: false,
numErrors: 0,
numReads: 1000,
numWrites: 2000,
temp: 42,
comment: 'NVMe Cache',
format: 'GPT: 4KiB-aligned',
fsType: 'btrfs',
transport: 'nvme',
warning: null,
critical: null,
fsFree: null,
fsSize: null,
fsUsed: null,
},
{
id: 'WD-WCC7K7YL9876',
device: 'sdb',
name: 'disk1',
size: 4000787030016,
idx: 1,
type: ArrayDiskType.DATA,
status: ArrayDiskStatus.DISK_OK,
isSpinning: true, // Currently spinning
rotational: true,
exportable: false,
numErrors: 0,
numReads: 5000,
numWrites: 3000,
temp: 35,
comment: 'Data Disk 1',
format: 'GPT: 4KiB-aligned',
fsType: 'xfs',
transport: 'sata',
warning: null,
critical: null,
fsFree: 1000000000,
fsSize: 4000000000,
fsUsed: 3000000000,
},
{
id: 'WD-SPUNDOWN123',
device: 'sdd',
name: 'disk2',
size: 4000787030016,
idx: 2,
type: ArrayDiskType.DATA,
status: ArrayDiskStatus.DISK_OK,
isSpinning: false, // Spun down
rotational: true,
exportable: false,
numErrors: 0,
numReads: 3000,
numWrites: 1000,
temp: 30,
comment: 'Data Disk 2 (spun down)',
format: 'GPT: 4KiB-aligned',
fsType: 'xfs',
transport: 'sata',
warning: null,
critical: null,
fsFree: 2000000000,
fsSize: 4000000000,
fsUsed: 2000000000,
},
];
const mockDiskLayoutData: Systeminformation.DiskLayoutData[] = [
{
@@ -178,25 +92,6 @@ describe('DisksService', () => {
smartStatus: 'unknown', // Simulate unknown status
temperature: null,
},
{
device: '/dev/sdd',
type: 'HD',
name: 'WD Spun Down',
vendor: 'Western Digital',
size: 4000787030016,
bytesPerSector: 512,
totalCylinders: 486401,
totalHeads: 255,
totalSectors: 7814037168,
totalTracks: 124032255,
tracksPerCylinder: 255,
sectorsPerTrack: 63,
firmwareRevision: '82.00A82',
serialNum: 'WD-SPUNDOWN123',
interfaceType: 'SATA',
smartStatus: 'Ok',
temperature: null,
},
];
const mockBlockDeviceData: Systeminformation.BlockDevicesData[] = [
@@ -279,50 +174,17 @@ describe('DisksService', () => {
protocol: 'SATA', // Assume SATA even if interface type unknown for disk
identifier: '/dev/sdc1',
},
// Partition for sdd
{
name: 'sdd1',
type: 'part',
fsType: 'xfs',
mount: '/mnt/disk2',
size: 4000787030016,
physical: 'HDD',
uuid: 'UUID-SDD1',
label: 'Data2',
model: 'WD Spun Down',
serial: 'WD-SPUNDOWN123',
removable: false,
protocol: 'SATA',
identifier: '/dev/sdd1',
},
];
beforeEach(async () => {
// Reset mocks before each test using vi
vi.clearAllMocks();
// Create mock ConfigService
const mockConfigService = {
get: vi.fn().mockImplementation((key: string, defaultValue?: any) => {
if (key === 'store.emhttp.disks') {
return mockArrayDisks;
}
return defaultValue;
}),
};
const module: TestingModule = await Test.createTestingModule({
providers: [
DisksService,
{
provide: ConfigService,
useValue: mockConfigService,
},
],
providers: [DisksService],
}).compile();
service = module.get<DisksService>(DisksService);
configService = module.get<ConfigService>(ConfigService);
// Set up default mock implementations
mockDiskLayout.mockResolvedValue(mockDiskLayoutData);
@@ -345,112 +207,46 @@ describe('DisksService', () => {
// --- Test getDisks ---
describe('getDisks', () => {
it('should return disks with spinning state from store', async () => {
it('should return disks without temperature', async () => {
const disks = await service.getDisks();
expect(mockDiskLayout).toHaveBeenCalledTimes(1);
expect(mockBlockDevices).toHaveBeenCalledTimes(1);
expect(configService.get).toHaveBeenCalledWith('store.emhttp.disks', []);
expect(mockBatchProcess).toHaveBeenCalledTimes(1);
expect(mockExeca).not.toHaveBeenCalled(); // Temperature should not be fetched
expect(mockBatchProcess).toHaveBeenCalledTimes(1); // Still uses batchProcess for parsing
expect(disks).toHaveLength(mockDiskLayoutData.length);
// Check NVMe disk with null spinning state
const nvmeDisk = disks.find((d) => d.id === 'S4ENNF0N123456');
expect(nvmeDisk).toBeDefined();
expect(nvmeDisk?.isSpinning).toBe(false); // null from state defaults to false
expect(nvmeDisk?.interfaceType).toBe(DiskInterfaceType.PCIE);
expect(nvmeDisk?.smartStatus).toBe(DiskSmartStatus.OK);
expect(nvmeDisk?.partitions).toHaveLength(2);
// Check spinning disk
const spinningDisk = disks.find((d) => d.id === 'WD-WCC7K7YL9876');
expect(spinningDisk).toBeDefined();
expect(spinningDisk?.isSpinning).toBe(true); // From state
expect(spinningDisk?.interfaceType).toBe(DiskInterfaceType.SATA);
// Check spun down disk
const spunDownDisk = disks.find((d) => d.id === 'WD-SPUNDOWN123');
expect(spunDownDisk).toBeDefined();
expect(spunDownDisk?.isSpinning).toBe(false); // From state
// Check disk not in state (defaults to not spinning)
const unknownDisk = disks.find((d) => d.id === 'OTHER-SERIAL-123');
expect(unknownDisk).toBeDefined();
expect(unknownDisk?.isSpinning).toBe(false); // Not in state, defaults to false
expect(unknownDisk?.interfaceType).toBe(DiskInterfaceType.UNKNOWN);
expect(unknownDisk?.smartStatus).toBe(DiskSmartStatus.UNKNOWN);
});
it('should handle empty state gracefully', async () => {
vi.mocked(configService.get).mockImplementation((key: string, defaultValue?: any) => {
if (key === 'store.emhttp.disks') {
return [];
}
return defaultValue;
expect(disks[0]).toMatchObject({
id: 'S4ENNF0N123456',
device: '/dev/sda',
type: 'HD',
name: 'SAMSUNG MZVLB512HBJQ-000L7',
vendor: 'Samsung',
size: 512110190592,
interfaceType: DiskInterfaceType.PCIE,
smartStatus: DiskSmartStatus.OK,
temperature: null, // Temperature is now null by default
partitions: [
{ name: 'sda1', fsType: DiskFsType.VFAT, size: 536870912 },
{ name: 'sda2', fsType: DiskFsType.EXT4, size: 511560000000 },
],
});
const disks = await service.getDisks();
// All disks should default to not spinning when state is empty
expect(disks).toHaveLength(mockDiskLayoutData.length);
disks.forEach((disk) => {
expect(disk.isSpinning).toBe(false);
expect(disks[1]).toMatchObject({
id: 'WD-WCC7K7YL9876',
device: '/dev/sdb',
interfaceType: DiskInterfaceType.SATA,
smartStatus: DiskSmartStatus.OK,
temperature: null,
partitions: [{ name: 'sdb1', fsType: DiskFsType.XFS, size: 4000787030016 }],
});
});
it('should handle trimmed serial numbers correctly', async () => {
// Add disk with spaces in ID
const disksWithSpaces = [...mockArrayDisks];
disksWithSpaces[0] = {
...disksWithSpaces[0],
id: ' S4ENNF0N123456 ', // spaces around ID
};
vi.mocked(configService.get).mockImplementation((key: string, defaultValue?: any) => {
if (key === 'store.emhttp.disks') {
return disksWithSpaces;
}
return defaultValue;
expect(disks[2]).toMatchObject({
id: 'OTHER-SERIAL-123',
device: '/dev/sdc',
interfaceType: DiskInterfaceType.UNKNOWN,
smartStatus: DiskSmartStatus.UNKNOWN,
temperature: null,
partitions: [{ name: 'sdc1', fsType: DiskFsType.NTFS, size: 1000204886016 }],
});
const disks = await service.getDisks();
const disk = disks.find((d) => d.id === 'S4ENNF0N123456');
expect(disk).toBeDefined();
expect(disk?.isSpinning).toBe(false); // null becomes false
});
it('should correctly map partitions to disks', async () => {
const disks = await service.getDisks();
const disk1 = disks.find((d) => d.id === 'S4ENNF0N123456');
expect(disk1?.partitions).toHaveLength(2);
expect(disk1?.partitions[0]).toEqual({
name: 'sda1',
fsType: DiskFsType.VFAT,
size: 536870912,
});
expect(disk1?.partitions[1]).toEqual({
name: 'sda2',
fsType: DiskFsType.EXT4,
size: 511560000000,
});
const disk2 = disks.find((d) => d.id === 'WD-WCC7K7YL9876');
expect(disk2?.partitions).toHaveLength(1);
expect(disk2?.partitions[0]).toEqual({
name: 'sdb1',
fsType: DiskFsType.XFS,
size: 4000787030016,
});
});
it('should use ConfigService to get state data', async () => {
await service.getDisks();
// Verify we're accessing the state through ConfigService
expect(configService.get).toHaveBeenCalledWith('store.emhttp.disks', []);
});
it('should handle empty disk layout or block devices', async () => {
@@ -471,31 +267,6 @@ describe('DisksService', () => {
});
});
// --- Test getDisk ---
describe('getDisk', () => {
it('should return a specific disk by id', async () => {
const disk = await service.getDisk('S4ENNF0N123456');
expect(disk).toBeDefined();
expect(disk.id).toBe('S4ENNF0N123456');
expect(disk.isSpinning).toBe(false); // null becomes false
});
it('should return spinning disk correctly', async () => {
const disk = await service.getDisk('WD-WCC7K7YL9876');
expect(disk).toBeDefined();
expect(disk.id).toBe('WD-WCC7K7YL9876');
expect(disk.isSpinning).toBe(true);
});
it('should throw NotFoundException for non-existent disk', async () => {
await expect(service.getDisk('NONEXISTENT')).rejects.toThrow(
'Disk with id NONEXISTENT not found'
);
});
});
// --- Test getTemperature ---
describe('getTemperature', () => {
it('should return temperature for a disk', async () => {

View File

@@ -1,11 +1,9 @@
import { Injectable, NotFoundException } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import type { Systeminformation } from 'systeminformation';
import { execa } from 'execa';
import { blockDevices, diskLayout } from 'systeminformation';
import { ArrayDisk } from '@app/unraid-api/graph/resolvers/array/array.model.js';
import {
Disk,
DiskFsType,
@@ -16,7 +14,6 @@ import { batchProcess } from '@app/utils.js';
@Injectable()
export class DisksService {
constructor(private readonly configService: ConfigService) {}
public async getTemperature(device: string): Promise<number | null> {
try {
const { stdout } = await execa('smartctl', ['-A', device]);
@@ -54,8 +51,7 @@ export class DisksService {
private async parseDisk(
disk: Systeminformation.DiskLayoutData,
partitionsToParse: Systeminformation.BlockDevicesData[],
arrayDisks: ArrayDisk[]
partitionsToParse: Systeminformation.BlockDevicesData[]
): Promise<Omit<Disk, 'temperature'>> {
const partitions = partitionsToParse
// Only get partitions from this disk
@@ -119,8 +115,6 @@ export class DisksService {
mappedInterfaceType = DiskInterfaceType.UNKNOWN;
}
const arrayDisk = arrayDisks.find((d) => d.id.trim() === disk.serialNum.trim());
return {
...disk,
id: disk.serialNum, // Ensure id is set
@@ -129,7 +123,6 @@ export class DisksService {
DiskSmartStatus.UNKNOWN,
interfaceType: mappedInterfaceType,
partitions,
isSpinning: arrayDisk?.isSpinning ?? false,
};
}
@@ -140,9 +133,9 @@ export class DisksService {
const partitions = await blockDevices().then((devices) =>
devices.filter((device) => device.type === 'part')
);
const arrayDisks = this.configService.get<ArrayDisk[]>('store.emhttp.disks', []);
const { data } = await batchProcess(await diskLayout(), async (disk) =>
this.parseDisk(disk, partitions, arrayDisks)
this.parseDisk(disk, partitions)
);
return data;
}

View File

@@ -1,47 +0,0 @@
import { Injectable, Logger, OnApplicationBootstrap } from '@nestjs/common';
import { SchedulerRegistry, Timeout } from '@nestjs/schedule';
import { CronJob } from 'cron';
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';
@Injectable()
export class ContainerStatusJob implements OnApplicationBootstrap {
private readonly logger = new Logger(ContainerStatusJob.name);
constructor(
private readonly dockerManifestService: DockerManifestService,
private readonly schedulerRegistry: SchedulerRegistry,
private readonly dockerConfigService: DockerConfigService
) {}
/**
* Initialize cron job for refreshing the update status for all containers on a user-configurable schedule.
*/
onApplicationBootstrap() {
if (!this.dockerConfigService.enabled()) return;
const cronExpression = this.dockerConfigService.getConfig().updateCheckCronSchedule;
const cronJob = CronJob.from({
cronTime: cronExpression,
onTick: () => {
this.dockerManifestService.refreshDigests().catch((error) => {
this.logger.warn(error, 'Failed to refresh container update status');
});
},
start: true,
});
this.schedulerRegistry.addCronJob(ContainerStatusJob.name, cronJob);
this.logger.verbose(
`Initialized cron job for refreshing container update status: ${ContainerStatusJob.name}`
);
}
/**
* Refresh container digests 5 seconds after application start.
*/
@Timeout(5_000)
async refreshContainerDigestsAfterStartup() {
if (!this.dockerConfigService.enabled()) return;
await this.dockerManifestService.refreshDigests();
}
}

View File

@@ -1,7 +0,0 @@
import { Field, ObjectType } from '@nestjs/graphql';
@ObjectType()
export class DockerConfig {
@Field(() => String)
updateCheckCronSchedule!: string;
}

View File

@@ -1,195 +0,0 @@
import { ConfigService } from '@nestjs/config';
import { CronExpression } from '@nestjs/schedule';
import { Test, TestingModule } from '@nestjs/testing';
import { ValidationError } from 'class-validator';
import { beforeEach, describe, expect, it, vi } from 'vitest';
import { AppError } from '@app/core/errors/app-error.js';
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
vi.mock('cron', () => ({
validateCronExpression: vi.fn(),
}));
vi.mock('@app/unraid-api/graph/resolvers/validation.utils.js', () => ({
validateObject: vi.fn(),
}));
describe('DockerConfigService - validate', () => {
let service: DockerConfigService;
beforeEach(async () => {
const module: TestingModule = await Test.createTestingModule({
providers: [
DockerConfigService,
{
provide: ConfigService,
useValue: {
get: vi.fn(),
},
},
],
}).compile();
service = module.get<DockerConfigService>(DockerConfigService);
vi.clearAllMocks();
});
describe('validate', () => {
it('should validate and return docker config for valid cron expression', async () => {
const inputConfig = { updateCheckCronSchedule: '0 6 * * *' };
const validatedConfig = { updateCheckCronSchedule: '0 6 * * *' };
const { validateObject } = await import(
'@app/unraid-api/graph/resolvers/validation.utils.js'
);
const { validateCronExpression } = await import('cron');
vi.mocked(validateObject).mockResolvedValue(validatedConfig);
vi.mocked(validateCronExpression).mockReturnValue({ valid: true });
const result = await service.validate(inputConfig);
expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
expect(validateCronExpression).toHaveBeenCalledWith('0 6 * * *');
expect(result).toBe(validatedConfig);
});
it('should validate and return docker config for predefined cron expression', async () => {
const inputConfig = { updateCheckCronSchedule: CronExpression.EVERY_DAY_AT_6AM };
const validatedConfig = { updateCheckCronSchedule: CronExpression.EVERY_DAY_AT_6AM };
const { validateObject } = await import(
'@app/unraid-api/graph/resolvers/validation.utils.js'
);
const { validateCronExpression } = await import('cron');
vi.mocked(validateObject).mockResolvedValue(validatedConfig);
vi.mocked(validateCronExpression).mockReturnValue({ valid: true });
const result = await service.validate(inputConfig);
expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
expect(validateCronExpression).toHaveBeenCalledWith(CronExpression.EVERY_DAY_AT_6AM);
expect(result).toBe(validatedConfig);
});
it('should throw AppError for invalid cron expression', async () => {
const inputConfig = { updateCheckCronSchedule: 'invalid-cron' };
const validatedConfig = { updateCheckCronSchedule: 'invalid-cron' };
const { validateObject } = await import(
'@app/unraid-api/graph/resolvers/validation.utils.js'
);
const { validateCronExpression } = await import('cron');
vi.mocked(validateObject).mockResolvedValue(validatedConfig);
vi.mocked(validateCronExpression).mockReturnValue({ valid: false });
await expect(service.validate(inputConfig)).rejects.toThrow(
new AppError('Cron expression not supported: invalid-cron')
);
expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
expect(validateCronExpression).toHaveBeenCalledWith('invalid-cron');
});
it('should throw AppError for empty cron expression', async () => {
const inputConfig = { updateCheckCronSchedule: '' };
const validatedConfig = { updateCheckCronSchedule: '' };
const { validateObject } = await import(
'@app/unraid-api/graph/resolvers/validation.utils.js'
);
const { validateCronExpression } = await import('cron');
vi.mocked(validateObject).mockResolvedValue(validatedConfig);
vi.mocked(validateCronExpression).mockReturnValue({ valid: false });
await expect(service.validate(inputConfig)).rejects.toThrow(
new AppError('Cron expression not supported: ')
);
expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
expect(validateCronExpression).toHaveBeenCalledWith('');
});
it('should throw AppError for malformed cron expression', async () => {
const inputConfig = { updateCheckCronSchedule: '* * * *' };
const validatedConfig = { updateCheckCronSchedule: '* * * *' };
const { validateObject } = await import(
'@app/unraid-api/graph/resolvers/validation.utils.js'
);
const { validateCronExpression } = await import('cron');
vi.mocked(validateObject).mockResolvedValue(validatedConfig);
vi.mocked(validateCronExpression).mockReturnValue({ valid: false });
await expect(service.validate(inputConfig)).rejects.toThrow(
new AppError('Cron expression not supported: * * * *')
);
expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
expect(validateCronExpression).toHaveBeenCalledWith('* * * *');
});
it('should propagate validation errors from validateObject', async () => {
const inputConfig = { updateCheckCronSchedule: '0 6 * * *' };
const validationError = new ValidationError();
validationError.property = 'updateCheckCronSchedule';
const { validateObject } = await import(
'@app/unraid-api/graph/resolvers/validation.utils.js'
);
vi.mocked(validateObject).mockRejectedValue(validationError);
await expect(service.validate(inputConfig)).rejects.toThrow();
expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
});
it('should handle complex valid cron expressions', async () => {
const inputConfig = { updateCheckCronSchedule: '0 0,12 * * 1-5' };
const validatedConfig = { updateCheckCronSchedule: '0 0,12 * * 1-5' };
const { validateObject } = await import(
'@app/unraid-api/graph/resolvers/validation.utils.js'
);
const { validateCronExpression } = await import('cron');
vi.mocked(validateObject).mockResolvedValue(validatedConfig);
vi.mocked(validateCronExpression).mockReturnValue({ valid: true });
const result = await service.validate(inputConfig);
expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
expect(validateCronExpression).toHaveBeenCalledWith('0 0,12 * * 1-5');
expect(result).toBe(validatedConfig);
});
it('should handle input with extra properties', async () => {
const inputConfig = {
updateCheckCronSchedule: '0 6 * * *',
extraProperty: 'should be ignored',
};
const validatedConfig = { updateCheckCronSchedule: '0 6 * * *' };
const { validateObject } = await import(
'@app/unraid-api/graph/resolvers/validation.utils.js'
);
const { validateCronExpression } = await import('cron');
vi.mocked(validateObject).mockResolvedValue(validatedConfig);
vi.mocked(validateCronExpression).mockReturnValue({ valid: true });
const result = await service.validate(inputConfig);
expect(validateObject).toHaveBeenCalledWith(expect.any(Function), inputConfig);
expect(validateCronExpression).toHaveBeenCalledWith('0 6 * * *');
expect(result).toBe(validatedConfig);
});
});
});

View File

@@ -1,45 +1,59 @@
import { Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { CronExpression } from '@nestjs/schedule';
import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';
import { validateCronExpression } from 'cron';
import { FeatureFlags } from '@app/consts.js';
import { AppError } from '@app/core/errors/app-error.js';
import { DockerConfig } from '@app/unraid-api/graph/resolvers/docker/docker-config.model.js';
import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';
import {
DEFAULT_ORGANIZER_ROOT_ID,
DEFAULT_ORGANIZER_VIEW_ID,
} from '@app/unraid-api/organizer/organizer.js';
import { OrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
import { validateOrganizerIntegrity } from '@app/unraid-api/organizer/organizer.validation.js';
@Injectable()
export class DockerConfigService extends ConfigFilePersister<DockerConfig> {
export class DockerConfigService extends ConfigFilePersister<OrganizerV1> {
constructor(configService: ConfigService) {
super(configService);
}
enabled(): boolean {
return FeatureFlags.ENABLE_NEXT_DOCKER_RELEASE;
}
configKey(): string {
return 'docker';
return 'dockerOrganizer';
}
fileName(): string {
return 'docker.config.json';
return 'docker.organizer.json';
}
defaultConfig(): DockerConfig {
defaultConfig(): OrganizerV1 {
return {
updateCheckCronSchedule: CronExpression.EVERY_DAY_AT_6AM,
version: 1,
resources: {},
views: {
default: {
id: DEFAULT_ORGANIZER_VIEW_ID,
name: 'Default',
root: DEFAULT_ORGANIZER_ROOT_ID,
entries: {
root: {
type: 'folder',
id: DEFAULT_ORGANIZER_ROOT_ID,
name: 'Root',
children: [],
},
},
},
},
};
}
async validate(config: object): Promise<DockerConfig> {
const dockerConfig = await validateObject(DockerConfig, config);
const cronExpression = validateCronExpression(dockerConfig.updateCheckCronSchedule);
if (!cronExpression.valid) {
throw new AppError(`Cron expression not supported: ${dockerConfig.updateCheckCronSchedule}`);
async validate(config: object): Promise<OrganizerV1> {
const organizer = await validateObject(OrganizerV1, config);
const { isValid, errors } = await validateOrganizerIntegrity(organizer);
if (!isValid) {
throw new AppError(`Docker organizer validation failed: ${JSON.stringify(errors, null, 2)}`);
}
return dockerConfig;
return organizer;
}
}
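
A minimal usage sketch of the reworked service, assuming the getConfig() accessor inherited from ConfigFilePersister (the module tests later in this diff mock it with exactly that shape). This is illustrative, not a call site from the codebase.

import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { OrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';

// Illustrative only: re-validate the persisted docker organizer on demand.
async function loadValidatedOrganizer(service: DockerConfigService): Promise<OrganizerV1> {
    const persisted = service.getConfig(); // assumed synchronous accessor from ConfigFilePersister
    return service.validate(persisted); // throws AppError when integrity validation fails
}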

View File

@@ -1,51 +0,0 @@
import { Logger } from '@nestjs/common';
import { Mutation, Parent, ResolveField, Resolver } from '@nestjs/graphql';
import { Resource } from '@unraid/shared/graphql.model.js';
import { AuthAction, UsePermissions } from '@unraid/shared/use-permissions.directive.js';
import { AppError } from '@app/core/errors/app-error.js';
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';
import { DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
@Resolver(() => DockerContainer)
export class DockerContainerResolver {
private readonly logger = new Logger(DockerContainerResolver.name);
constructor(private readonly dockerManifestService: DockerManifestService) {}
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@UsePermissions({
action: AuthAction.READ_ANY,
resource: Resource.DOCKER,
})
@ResolveField(() => Boolean, { nullable: true })
public async isUpdateAvailable(@Parent() container: DockerContainer) {
try {
return await this.dockerManifestService.isUpdateAvailableCached(container.image);
} catch (error) {
this.logger.error(error);
throw new AppError('Failed to read cached update status. See graphql-api.log for details.');
}
}
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@UsePermissions({
action: AuthAction.READ_ANY,
resource: Resource.DOCKER,
})
@ResolveField(() => Boolean, { nullable: true })
public async isRebuildReady(@Parent() container: DockerContainer) {
return this.dockerManifestService.isRebuildReady(container.hostConfig?.networkMode);
}
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@UsePermissions({
action: AuthAction.UPDATE_ANY,
resource: Resource.DOCKER,
})
@Mutation(() => Boolean)
public async refreshDockerDigests() {
return this.dockerManifestService.refreshDigests();
}
}

View File

@@ -1,62 +0,0 @@
import { Injectable } from '@nestjs/common';
import { AsyncMutex } from '@unraid/shared/util/processing.js';
import { docker } from '@app/core/utils/index.js';
import {
CachedStatusEntry,
DockerPhpService,
} from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
@Injectable()
export class DockerManifestService {
constructor(private readonly dockerPhpService: DockerPhpService) {}
private readonly refreshDigestsMutex = new AsyncMutex(() => {
return this.dockerPhpService.refreshDigestsViaPhp();
});
/**
* Recomputes local/remote docker container digests and writes them to /var/lib/docker/unraid-update-status.json
* @param mutex - Optional mutex to use for the operation. If not provided, a default mutex will be used.
* @param dockerUpdatePath - Optional path to the DockerUpdate.php file. If not provided, the default path will be used.
* @returns True if the digests were refreshed, false if the operation failed
*/
async refreshDigests(mutex = this.refreshDigestsMutex, dockerUpdatePath?: string) {
return mutex.do(() => {
return this.dockerPhpService.refreshDigestsViaPhp(dockerUpdatePath);
});
}
/**
* Checks if an update is available for a given container image.
* @param imageRef - The image reference to check, e.g. "unraid/baseimage:latest". If no tag is provided, "latest" is assumed, following the webgui's implementation.
* @param cacheData read from /var/lib/docker/unraid-update-status.json by default
* @returns True if an update is available, false if not, or null if the status is unknown
*/
async isUpdateAvailableCached(imageRef: string, cacheData?: Record<string, CachedStatusEntry>) {
let taggedRef = imageRef;
if (!taggedRef.includes(':')) taggedRef += ':latest';
cacheData ??= await this.dockerPhpService.readCachedUpdateStatus();
const containerData = cacheData[taggedRef];
if (!containerData) return null;
return containerData.status?.toLowerCase() === 'true';
}
/**
* Checks if a container is rebuild ready.
* @param networkMode - The network mode of the container, e.g. "container:unraid/baseimage:latest".
* @returns True if the container is rebuild ready, false if not
*/
async isRebuildReady(networkMode?: string) {
if (!networkMode || !networkMode.startsWith('container:')) return false;
const target = networkMode.slice('container:'.length);
try {
await docker.getContainer(target).inspect();
return false;
} catch {
return true; // target container cannot be inspected, i.e. the webgui's ':???' (unresolved) case
}
}
}
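
For context on the removed service above, a small illustrative call pattern for the cached update check; the wrapper function is hypothetical and only restates the null/boolean contract described in the docstring.

import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';

// Illustrative only: an untagged reference is checked as ':latest', and null means
// the image has no entry yet in /var/lib/docker/unraid-update-status.json.
async function describeUpdateStatus(svc: DockerManifestService, imageRef: string): Promise<string> {
    const status = await svc.isUpdateAvailableCached(imageRef);
    if (status === null) return `${imageRef}: unknown (not in cache)`;
    return status ? `${imageRef}: update available` : `${imageRef}: up to date`;
}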

View File

@@ -2,17 +2,17 @@ import { Test } from '@nestjs/testing';
import { beforeEach, describe, expect, it, vi } from 'vitest';
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import {
containerToResource,
DockerOrganizerService,
} from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
import {
ContainerPortType,
ContainerState,
DockerContainer,
} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
import {
containerToResource,
DockerOrganizerService,
} from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
import { OrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
describe('containerToResource', () => {
@@ -138,7 +138,7 @@ describe('containerToResource', () => {
describe('DockerOrganizerService', () => {
let service: DockerOrganizerService;
let configService: DockerOrganizerConfigService;
let configService: DockerConfigService;
let dockerService: DockerService;
const mockOrganizer: OrganizerV1 = {
@@ -178,7 +178,7 @@ describe('DockerOrganizerService', () => {
providers: [
DockerOrganizerService,
{
provide: DockerOrganizerConfigService,
provide: DockerConfigService,
useValue: {
getConfig: vi.fn().mockImplementation(() => structuredClone(mockOrganizer)),
validate: vi.fn().mockImplementation((config) => Promise.resolve(config)),
@@ -220,7 +220,7 @@ describe('DockerOrganizerService', () => {
}).compile();
service = moduleRef.get<DockerOrganizerService>(DockerOrganizerService);
configService = moduleRef.get<DockerOrganizerConfigService>(DockerOrganizerConfigService);
configService = moduleRef.get<DockerConfigService>(DockerConfigService);
dockerService = moduleRef.get<DockerService>(DockerService);
});

View File

@@ -3,9 +3,9 @@ import { Injectable, Logger } from '@nestjs/common';
import type { ContainerListOptions } from 'dockerode';
import { AppError } from '@app/core/errors/app-error.js';
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
import {
addMissingResourcesToView,
createFolderInView,
@@ -47,7 +47,7 @@ export function containerListToResourcesObject(containers: DockerContainer[]): O
export class DockerOrganizerService {
private readonly logger = new Logger(DockerOrganizerService.name);
constructor(
private readonly dockerConfigService: DockerOrganizerConfigService,
private readonly dockerConfigService: DockerConfigService,
private readonly dockerService: DockerService
) {}

View File

@@ -1,130 +0,0 @@
import { Injectable, Logger } from '@nestjs/common';
import { readFile } from 'fs/promises';
import { z } from 'zod';
import { phpLoader } from '@app/core/utils/plugins/php-loader.js';
import {
ExplicitStatusItem,
UpdateStatus,
} from '@app/unraid-api/graph/resolvers/docker/docker-update-status.model.js';
import { parseDockerPushCalls } from '@app/unraid-api/graph/resolvers/docker/utils/docker-push-parser.js';
type StatusItem = { name: string; updateStatus: 0 | 1 | 2 | 3 };
/**
* These types reflect the structure of the /var/lib/docker/unraid-update-status.json file,
* which is not controlled by the Unraid API.
*/
const CachedStatusEntrySchema = z.object({
/** sha256 digest - "sha256:..." */
local: z.string(),
/** sha256 digest - "sha256:..." */
remote: z.string(),
/** whether an update is available ('true'), not available ('false'), or unknown (null); stored as strings in the file */
status: z.enum(['true', 'false']).nullable(),
});
const CachedStatusSchema = z.record(z.string(), CachedStatusEntrySchema);
export type CachedStatusEntry = z.infer<typeof CachedStatusEntrySchema>;
@Injectable()
export class DockerPhpService {
private readonly logger = new Logger(DockerPhpService.name);
constructor() {}
/**
* Reads JSON from a file containing cached update status.
* If the file does not exist, an empty object is returned.
* @param cacheFile
* @returns
*/
async readCachedUpdateStatus(
cacheFile = '/var/lib/docker/unraid-update-status.json'
): Promise<Record<string, CachedStatusEntry>> {
try {
const cache = await readFile(cacheFile, 'utf8');
const cacheData = JSON.parse(cache);
const { success, data } = CachedStatusSchema.safeParse(cacheData);
if (success) return data;
this.logger.warn(cacheData, 'Invalid cached update status');
return {};
} catch (error) {
this.logger.warn(error, 'Failed to read cached update status');
return {};
}
}
/**----------------------
* Refresh Container Digests
*------------------------**/
/**
* Recomputes local/remote digests by triggering `DockerTemplates->getAllInfo(true)` via DockerUpdate.php
* @param dockerUpdatePath - Path to the DockerUpdate.php file
* @returns True if the digests were refreshed, false if the file is not found or the operation failed
*/
async refreshDigestsViaPhp(
dockerUpdatePath = '/usr/local/emhttp/plugins/dynamix.docker.manager/include/DockerUpdate.php'
) {
try {
await phpLoader({
file: dockerUpdatePath,
method: 'GET',
});
return true;
} catch {
// ignore; offline may keep remote as 'undef'
return false;
}
}
/**----------------------
* Parse Container Statuses
*------------------------**/
private parseStatusesFromDockerPush(js: string): ExplicitStatusItem[] {
const matches = parseDockerPushCalls(js);
return matches.map(({ name, updateStatus }) => ({
name,
updateStatus: this.updateStatusToString(updateStatus as StatusItem['updateStatus']),
}));
}
private updateStatusToString(updateStatus: 0): UpdateStatus.UP_TO_DATE;
private updateStatusToString(updateStatus: 1): UpdateStatus.UPDATE_AVAILABLE;
private updateStatusToString(updateStatus: 2): UpdateStatus.REBUILD_READY;
private updateStatusToString(updateStatus: 3): UpdateStatus.UNKNOWN;
// prettier-ignore
private updateStatusToString(updateStatus: StatusItem['updateStatus']): ExplicitStatusItem['updateStatus'];
private updateStatusToString(
updateStatus: StatusItem['updateStatus']
): ExplicitStatusItem['updateStatus'] {
switch (updateStatus) {
case 0:
return UpdateStatus.UP_TO_DATE;
case 1:
return UpdateStatus.UPDATE_AVAILABLE;
case 2:
return UpdateStatus.REBUILD_READY;
default:
return UpdateStatus.UNKNOWN;
}
}
/**
* Gets the update statuses for all containers by triggering `DockerTemplates->getAllInfo(true)` via DockerContainers.php
* @param dockerContainersPath - Path to the DockerContainers.php file
* @returns The update statuses for all containers
*/
async getContainerUpdateStatuses(
dockerContainersPath = '/usr/local/emhttp/plugins/dynamix.docker.manager/include/DockerContainers.php'
): Promise<ExplicitStatusItem[]> {
const stdout = await phpLoader({
file: dockerContainersPath,
method: 'GET',
});
const parts = stdout.split('\0'); // [html, "docker.push(...)", busyFlag]
const js = parts[1] || '';
return this.parseStatusesFromDockerPush(js);
}
}
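
A standalone illustration of the cache-file validation described above. The schema is re-declared locally purely for the example; it mirrors, but is not imported from, the removed service.

import { z } from 'zod';

// Local re-statement of the unraid-update-status.json entry shape, for illustration only.
const EntrySchema = z.object({
    local: z.string(),
    remote: z.string(),
    status: z.enum(['true', 'false']).nullable(),
});
const CacheSchema = z.record(z.string(), EntrySchema);

const sample = {
    'nginx:latest': { local: 'sha256:aaa', remote: 'sha256:bbb', status: 'true' },
};

const parsed = CacheSchema.safeParse(sample);
if (parsed.success) {
    // 'true'/'false' are strings in the file; null means the status is unknown
    console.log(parsed.data['nginx:latest'].status); // 'true'
}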

View File

@@ -1,25 +0,0 @@
import { Field, ObjectType, registerEnumType } from '@nestjs/graphql';
/**
* Note that these values propagate down to API consumers, so be aware of breaking changes.
*/
export enum UpdateStatus {
UP_TO_DATE = 'UP_TO_DATE',
UPDATE_AVAILABLE = 'UPDATE_AVAILABLE',
REBUILD_READY = 'REBUILD_READY',
UNKNOWN = 'UNKNOWN',
}
registerEnumType(UpdateStatus, {
name: 'UpdateStatus',
description: 'Update status of a container.',
});
@ObjectType()
export class ExplicitStatusItem {
@Field(() => String)
name!: string;
@Field(() => UpdateStatus)
updateStatus!: UpdateStatus;
}

View File

@@ -1,16 +1,15 @@
import { ConfigService } from '@nestjs/config';
import { Test, TestingModule } from '@nestjs/testing';
import { describe, expect, it, vi } from 'vitest';
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerEventService } from '@app/unraid-api/graph/resolvers/docker/docker-event.service.js';
import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
import { DockerModule } from '@app/unraid-api/graph/resolvers/docker/docker.module.js';
import { DockerMutationsResolver } from '@app/unraid-api/graph/resolvers/docker/docker.mutations.resolver.js';
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
describe('DockerModule', () => {
it('should compile the module', async () => {
@@ -19,8 +18,6 @@ describe('DockerModule', () => {
})
.overrideProvider(DockerService)
.useValue({ getDockerClient: vi.fn() })
.overrideProvider(DockerOrganizerConfigService)
.useValue({ getConfig: vi.fn() })
.overrideProvider(DockerConfigService)
.useValue({ getConfig: vi.fn() })
.compile();
@@ -64,7 +61,6 @@ describe('DockerModule', () => {
DockerResolver,
{ provide: DockerService, useValue: {} },
{ provide: DockerOrganizerService, useValue: {} },
{ provide: DockerPhpService, useValue: { getContainerUpdateStatuses: vi.fn() } },
],
}).compile();

View File

@@ -1,36 +1,22 @@
import { Module } from '@nestjs/common';
import { JobModule } from '@app/unraid-api/cron/job.module.js';
import { ContainerStatusJob } from '@app/unraid-api/graph/resolvers/docker/container-status.job.js';
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerContainerResolver } from '@app/unraid-api/graph/resolvers/docker/docker-container.resolver.js';
import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';
import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
import { DockerMutationsResolver } from '@app/unraid-api/graph/resolvers/docker/docker.mutations.resolver.js';
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
@Module({
imports: [JobModule],
providers: [
// Services
DockerService,
DockerOrganizerConfigService,
DockerOrganizerService,
DockerManifestService,
DockerPhpService,
DockerConfigService,
DockerOrganizerService,
// DockerEventService,
// Jobs
ContainerStatusJob,
// Resolvers
DockerResolver,
DockerMutationsResolver,
DockerContainerResolver,
],
exports: [DockerService],
})

View File

@@ -3,11 +3,10 @@ import { Test } from '@nestjs/testing';
import { beforeEach, describe, expect, it, vi } from 'vitest';
import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
import { ContainerState, DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
describe('DockerResolver', () => {
let resolver: DockerResolver;
@@ -27,13 +26,7 @@ describe('DockerResolver', () => {
{
provide: DockerOrganizerService,
useValue: {
resolveOrganizer: vi.fn(),
},
},
{
provide: DockerPhpService,
useValue: {
getContainerUpdateStatuses: vi.fn(),
getResolvedOrganizer: vi.fn(),
},
},
],

View File

@@ -3,25 +3,21 @@ import { Args, Mutation, Query, ResolveField, Resolver } from '@nestjs/graphql';
import { AuthAction, Resource } from '@unraid/shared/graphql.model.js';
import { UsePermissions } from '@unraid/shared/use-permissions.directive.js';
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
import { ExplicitStatusItem } from '@app/unraid-api/graph/resolvers/docker/docker-update-status.model.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/docker-organizer.service.js';
import {
Docker,
DockerContainer,
DockerNetwork,
} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
import { DEFAULT_ORGANIZER_ROOT_ID } from '@app/unraid-api/organizer/organizer.js';
import { ResolvedOrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
import { OrganizerV1, ResolvedOrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
@Resolver(() => Docker)
export class DockerResolver {
constructor(
private readonly dockerService: DockerService,
private readonly dockerOrganizerService: DockerOrganizerService,
private readonly dockerPhpService: DockerPhpService
private readonly dockerOrganizerService: DockerOrganizerService
) {}
@UsePermissions({
@@ -57,7 +53,6 @@ export class DockerResolver {
return this.dockerService.getNetworks({ skipCache });
}
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@UsePermissions({
action: AuthAction.READ_ANY,
resource: Resource.DOCKER,
@@ -67,7 +62,6 @@ export class DockerResolver {
return this.dockerOrganizerService.resolveOrganizer();
}
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@UsePermissions({
action: AuthAction.UPDATE_ANY,
resource: Resource.DOCKER,
@@ -86,7 +80,6 @@ export class DockerResolver {
return this.dockerOrganizerService.resolveOrganizer(organizer);
}
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@UsePermissions({
action: AuthAction.UPDATE_ANY,
resource: Resource.DOCKER,
@@ -103,7 +96,6 @@ export class DockerResolver {
return this.dockerOrganizerService.resolveOrganizer(organizer);
}
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@UsePermissions({
action: AuthAction.UPDATE_ANY,
resource: Resource.DOCKER,
@@ -116,7 +108,6 @@ export class DockerResolver {
return this.dockerOrganizerService.resolveOrganizer(organizer);
}
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@UsePermissions({
action: AuthAction.UPDATE_ANY,
resource: Resource.DOCKER,
@@ -132,14 +123,4 @@ export class DockerResolver {
});
return this.dockerOrganizerService.resolveOrganizer(organizer);
}
@UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@UsePermissions({
action: AuthAction.READ_ANY,
resource: Resource.DOCKER,
})
@ResolveField(() => [ExplicitStatusItem])
public async containerUpdateStatuses() {
return this.dockerPhpService.getContainerUpdateStatuses();
}
}

View File

@@ -1,64 +0,0 @@
import { Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';
import { FeatureFlags } from '@app/consts.js';
import { AppError } from '@app/core/errors/app-error.js';
import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';
import {
DEFAULT_ORGANIZER_ROOT_ID,
DEFAULT_ORGANIZER_VIEW_ID,
} from '@app/unraid-api/organizer/organizer.js';
import { OrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
import { validateOrganizerIntegrity } from '@app/unraid-api/organizer/organizer.validation.js';
@Injectable()
export class DockerOrganizerConfigService extends ConfigFilePersister<OrganizerV1> {
constructor(configService: ConfigService) {
super(configService);
}
enabled(): boolean {
return FeatureFlags.ENABLE_NEXT_DOCKER_RELEASE;
}
configKey(): string {
return 'dockerOrganizer';
}
fileName(): string {
return 'docker.organizer.json';
}
defaultConfig(): OrganizerV1 {
return {
version: 1,
resources: {},
views: {
default: {
id: DEFAULT_ORGANIZER_VIEW_ID,
name: 'Default',
root: DEFAULT_ORGANIZER_ROOT_ID,
entries: {
root: {
type: 'folder',
id: DEFAULT_ORGANIZER_ROOT_ID,
name: 'Root',
children: [],
},
},
},
},
};
}
async validate(config: object): Promise<OrganizerV1> {
const organizer = await validateObject(OrganizerV1, config);
const { isValid, errors } = await validateOrganizerIntegrity(organizer);
if (!isValid) {
throw new AppError(`Docker organizer validation failed: ${JSON.stringify(errors, null, 2)}`);
}
return organizer;
}
}

View File

@@ -1,124 +0,0 @@
import { describe, expect, it } from 'vitest';
import type { DockerPushMatch } from '@app/unraid-api/graph/resolvers/docker/utils/docker-push-parser.js';
import { parseDockerPushCalls } from '@app/unraid-api/graph/resolvers/docker/utils/docker-push-parser.js';
describe('parseDockerPushCalls', () => {
it('should extract name and update status from valid docker.push call', () => {
const jsCode = "docker.push({name:'nginx',update:1});";
const result = parseDockerPushCalls(jsCode);
expect(result).toEqual([{ name: 'nginx', updateStatus: 1 }]);
});
it('should handle multiple docker.push calls in same string', () => {
const jsCode = `
docker.push({name:'nginx',update:1});
docker.push({name:'mysql',update:0});
docker.push({name:'redis',update:2});
`;
const result = parseDockerPushCalls(jsCode);
expect(result).toEqual([
{ name: 'nginx', updateStatus: 1 },
{ name: 'mysql', updateStatus: 0 },
{ name: 'redis', updateStatus: 2 },
]);
});
it('should handle docker.push calls with additional properties', () => {
const jsCode =
"docker.push({id:'123',name:'nginx',version:'latest',update:3,status:'running'});";
const result = parseDockerPushCalls(jsCode);
expect(result).toEqual([{ name: 'nginx', updateStatus: 3 }]);
});
it('should handle different property order', () => {
const jsCode = "docker.push({update:2,name:'postgres',id:'456'});";
const result = parseDockerPushCalls(jsCode);
expect(result).toEqual([{ name: 'postgres', updateStatus: 2 }]);
});
it('should handle container names with special characters', () => {
const jsCode = "docker.push({name:'my-app_v2.0',update:1});";
const result = parseDockerPushCalls(jsCode);
expect(result).toEqual([{ name: 'my-app_v2.0', updateStatus: 1 }]);
});
it('should handle whitespace variations', () => {
const jsCode = "docker.push({ name: 'nginx' , update: 1 });";
const result = parseDockerPushCalls(jsCode);
expect(result).toEqual([{ name: 'nginx', updateStatus: 1 }]);
});
it('should return empty array for empty string', () => {
const result = parseDockerPushCalls('');
expect(result).toEqual([]);
});
it('should return empty array when no docker.push calls found', () => {
const jsCode = "console.log('no docker calls here');";
const result = parseDockerPushCalls(jsCode);
expect(result).toEqual([]);
});
it('should ignore malformed docker.push calls', () => {
const jsCode = `
docker.push({name:'valid',update:1});
docker.push({name:'missing-update'});
docker.push({update:2});
docker.push({name:'another-valid',update:0});
`;
const result = parseDockerPushCalls(jsCode);
expect(result).toEqual([
{ name: 'valid', updateStatus: 1 },
{ name: 'another-valid', updateStatus: 0 },
]);
});
it('should handle all valid update status values', () => {
const jsCode = `
docker.push({name:'container0',update:0});
docker.push({name:'container1',update:1});
docker.push({name:'container2',update:2});
docker.push({name:'container3',update:3});
`;
const result = parseDockerPushCalls(jsCode);
expect(result).toEqual([
{ name: 'container0', updateStatus: 0 },
{ name: 'container1', updateStatus: 1 },
{ name: 'container2', updateStatus: 2 },
{ name: 'container3', updateStatus: 3 },
]);
});
it('should handle real-world example with HTML and multiple containers', () => {
const jsCode = `
<div>some html</div>
docker.push({id:'abc123',name:'plex',version:'1.32',update:1,autostart:true});
docker.push({id:'def456',name:'nextcloud',version:'latest',update:0,ports:'80:8080'});
<script>more content</script>
docker.push({id:'ghi789',name:'homeassistant',update:2});
`;
const result = parseDockerPushCalls(jsCode);
expect(result).toEqual([
{ name: 'plex', updateStatus: 1 },
{ name: 'nextcloud', updateStatus: 0 },
{ name: 'homeassistant', updateStatus: 2 },
]);
});
it('should handle nested braces in other properties', () => {
const jsCode = 'docker.push({config:\'{"nested":"value"}\',name:\'test\',update:1});';
const result = parseDockerPushCalls(jsCode);
expect(result).toEqual([{ name: 'test', updateStatus: 1 }]);
});
});

View File

@@ -1,24 +0,0 @@
export interface DockerPushMatch {
name: string;
updateStatus: number;
}
export function parseDockerPushCalls(jsCode: string): DockerPushMatch[] {
const dockerPushRegex = /docker\.push\(\{[^}]*(?:(?:[^{}]|{[^}]*})*)\}\);/g;
const matches: DockerPushMatch[] = [];
for (const match of jsCode.matchAll(dockerPushRegex)) {
const objectContent = match[0];
const nameMatch = objectContent.match(/name\s*:\s*'([^']+)'/);
const updateMatch = objectContent.match(/update\s*:\s*(\d)/);
if (nameMatch && updateMatch) {
const name = nameMatch[1];
const updateStatus = Number(updateMatch[1]);
matches.push({ name, updateStatus });
}
}
return matches;
}
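
A short usage sketch of the removed parser, pairing it with a numeric-to-label mapping in the spirit of DockerPhpService.updateStatusToString above; the labels array is illustrative.

import { parseDockerPushCalls } from '@app/unraid-api/graph/resolvers/docker/utils/docker-push-parser.js';

// Illustrative mapping of the webgui's numeric update codes (0..3) to readable labels.
const labels = ['up to date', 'update available', 'rebuild ready', 'unknown'] as const;

const js = "docker.push({name:'nginx',update:1}); docker.push({name:'redis',update:0});";
for (const { name, updateStatus } of parseDockerPushCalls(js)) {
    console.log(`${name}: ${labels[updateStatus] ?? 'unknown'}`);
}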

View File

@@ -1,4 +1,4 @@
import { Injectable, Logger, OnApplicationBootstrap, OnModuleDestroy } from '@nestjs/common';
import { Injectable, Logger, OnModuleDestroy, OnModuleInit } from '@nestjs/common';
import crypto from 'crypto';
import { ChildProcess } from 'node:child_process';
import { mkdir, rm, writeFile } from 'node:fs/promises';
@@ -7,7 +7,6 @@ import { dirname, join } from 'node:path';
import { execa } from 'execa';
import got, { HTTPError } from 'got';
import pRetry from 'p-retry';
import semver from 'semver';
import { sanitizeParams } from '@app/core/log.js';
import { fileExists } from '@app/core/utils/files/file-exists.js';
@@ -26,7 +25,7 @@ import {
import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';
@Injectable()
export class RCloneApiService implements OnApplicationBootstrap, OnModuleDestroy {
export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
private isInitialized: boolean = false;
private readonly logger = new Logger(RCloneApiService.name);
private rcloneSocketPath: string = '';
@@ -45,7 +44,7 @@ export class RCloneApiService implements OnApplicationBootstrap, OnModuleDestroy
return this.isInitialized;
}
async onApplicationBootstrap(): Promise<void> {
async onModuleInit(): Promise<void> {
// RClone startup disabled - early return
if (ENVIRONMENT === 'production') {
this.logger.debug('RClone startup is disabled');
@@ -240,41 +239,12 @@ export class RCloneApiService implements OnApplicationBootstrap, OnModuleDestroy
}
/**
* Checks if the RClone binary is available on the system and meets minimum version requirements
* Checks if the RClone binary is available on the system
*/
private async checkRcloneBinaryExists(): Promise<boolean> {
try {
const result = await execa('rclone', ['version']);
const versionOutput = result.stdout.trim();
// Extract raw version string (format: "rclone vX.XX.X" or "rclone vX.XX.X-beta.X")
const versionMatch = versionOutput.match(/rclone v([\d.\-\w]+)/);
if (!versionMatch) {
this.logger.error('Unable to parse RClone version from output');
return false;
}
const rawVersion = versionMatch[1];
// Use semver.coerce to get base semver from prerelease versions
const coercedVersion = semver.coerce(rawVersion);
if (!coercedVersion) {
this.logger.error(`Failed to parse RClone version: raw="${rawVersion}"`);
return false;
}
const minimumVersion = '1.70.0';
if (!semver.gte(coercedVersion, minimumVersion)) {
this.logger.error(
`RClone version ${rawVersion} (coerced: ${coercedVersion}) is too old. Minimum required version is ${minimumVersion}`
);
return false;
}
this.logger.debug(
`RClone binary is available on the system (version ${rawVersion}, coerced: ${coercedVersion}).`
);
await execa('rclone', ['version']);
this.logger.debug('RClone binary is available on the system.');
return true;
} catch (error: unknown) {
if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {

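For reference, a minimal sketch of the semver gate the removed lines implemented, kept separate from the service itself; the minimum version shown is the one from the removed code.

import semver from 'semver';

// Illustrative only: coerce prerelease strings like '1.70.1-beta.2' to a base
// version and compare against the minimum the removed check enforced.
function meetsMinimumRcloneVersion(rawVersion: string, minimum = '1.70.0'): boolean {
    const coerced = semver.coerce(rawVersion);
    return coerced !== null && semver.gte(coerced, minimum);
}

console.log(meetsMinimumRcloneVersion('1.70.1-beta.2')); // true
console.log(meetsMinimumRcloneVersion('1.69.3')); // false
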
View File

@@ -1,216 +0,0 @@
import { ConfigService } from '@nestjs/config';
import { Test } from '@nestjs/testing';
import * as client from 'openid-client';
import { beforeEach, describe, expect, it, vi } from 'vitest';
import { OidcClientConfigService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client-config.service.js';
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';
vi.mock('openid-client');
describe('OidcClientConfigService - Cache Behavior', () => {
let service: OidcClientConfigService;
let validationService: OidcValidationService;
const createMockProvider = (port: number): OidcProvider => ({
id: 'test-provider',
name: 'Test Provider',
clientId: 'test-client-id',
clientSecret: 'test-secret',
issuer: `http://localhost:${port}`,
scopes: ['openid', 'profile', 'email'],
authorizationRules: [],
});
const createMockConfiguration = (port: number) => {
const mockConfig = {
serverMetadata: vi.fn(() => ({
issuer: `http://localhost:${port}`,
authorization_endpoint: `http://localhost:${port}/auth`,
token_endpoint: `http://localhost:${port}/token`,
jwks_uri: `http://localhost:${port}/jwks`,
userinfo_endpoint: `http://localhost:${port}/userinfo`,
})),
};
return mockConfig as unknown as client.Configuration;
};
beforeEach(async () => {
vi.clearAllMocks();
const mockConfigService = {
get: vi.fn(),
set: vi.fn(),
};
const module = await Test.createTestingModule({
providers: [
OidcClientConfigService,
OidcValidationService,
{
provide: ConfigService,
useValue: mockConfigService,
},
],
}).compile();
service = module.get<OidcClientConfigService>(OidcClientConfigService);
validationService = module.get<OidcValidationService>(OidcValidationService);
});
describe('Configuration Caching', () => {
it('should cache configuration on first call', async () => {
const provider = createMockProvider(1029);
const mockConfig = createMockConfiguration(1029);
vi.spyOn(validationService, 'performDiscovery').mockResolvedValueOnce(mockConfig);
// First call
const config1 = await service.getOrCreateConfig(provider);
expect(validationService.performDiscovery).toHaveBeenCalledTimes(1);
expect(config1.serverMetadata().issuer).toBe('http://localhost:1029');
// Second call with same provider ID should use cache
const config2 = await service.getOrCreateConfig(provider);
expect(validationService.performDiscovery).toHaveBeenCalledTimes(1);
expect(config2).toBe(config1);
});
it('should return stale cached configuration when issuer changes without cache clear', async () => {
const provider1029 = createMockProvider(1029);
const provider1030 = createMockProvider(1030);
const mockConfig1029 = createMockConfiguration(1029);
const mockConfig1030 = createMockConfiguration(1030);
vi.spyOn(validationService, 'performDiscovery')
.mockResolvedValueOnce(mockConfig1029)
.mockResolvedValueOnce(mockConfig1030);
// Initial configuration on port 1029
const config1 = await service.getOrCreateConfig(provider1029);
expect(config1.serverMetadata().issuer).toBe('http://localhost:1029');
expect(config1.serverMetadata().authorization_endpoint).toBe('http://localhost:1029/auth');
// Update provider to port 1030 (simulating UI change)
// Without clearing cache, it should still return the old cached config
const config2 = await service.getOrCreateConfig(provider1030);
// THIS IS THE BUG: The service returns cached config for port 1029
// even though the provider now has issuer on port 1030
expect(config2.serverMetadata().issuer).toBe('http://localhost:1029');
expect(config2.serverMetadata().authorization_endpoint).toBe('http://localhost:1029/auth');
// performDiscovery should only be called once because cache is used
expect(validationService.performDiscovery).toHaveBeenCalledTimes(1);
});
it('should return fresh configuration after cache is cleared', async () => {
const provider1029 = createMockProvider(1029);
const provider1030 = createMockProvider(1030);
const mockConfig1029 = createMockConfiguration(1029);
const mockConfig1030 = createMockConfiguration(1030);
vi.spyOn(validationService, 'performDiscovery')
.mockResolvedValueOnce(mockConfig1029)
.mockResolvedValueOnce(mockConfig1030);
// Initial configuration on port 1029
const config1 = await service.getOrCreateConfig(provider1029);
expect(config1.serverMetadata().issuer).toBe('http://localhost:1029');
// Clear cache for the provider
service.clearCache(provider1030.id);
// Now it should fetch fresh config for port 1030
const config2 = await service.getOrCreateConfig(provider1030);
expect(config2.serverMetadata().issuer).toBe('http://localhost:1030');
expect(config2.serverMetadata().authorization_endpoint).toBe('http://localhost:1030/auth');
// performDiscovery should be called twice (once for each port)
expect(validationService.performDiscovery).toHaveBeenCalledTimes(2);
});
it('should clear all provider caches when clearCache is called without providerId', async () => {
const provider1 = { ...createMockProvider(1029), id: 'provider1' };
const provider2 = { ...createMockProvider(1030), id: 'provider2' };
const mockConfig1 = createMockConfiguration(1029);
const mockConfig2 = createMockConfiguration(1030);
vi.spyOn(validationService, 'performDiscovery')
.mockResolvedValueOnce(mockConfig1)
.mockResolvedValueOnce(mockConfig2)
.mockResolvedValueOnce(mockConfig1)
.mockResolvedValueOnce(mockConfig2);
// Cache both providers
await service.getOrCreateConfig(provider1);
await service.getOrCreateConfig(provider2);
expect(service.getCacheSize()).toBe(2);
// Clear all caches
service.clearCache();
expect(service.getCacheSize()).toBe(0);
// Both should fetch fresh configs
await service.getOrCreateConfig(provider1);
await service.getOrCreateConfig(provider2);
// performDiscovery should be called 4 times total
expect(validationService.performDiscovery).toHaveBeenCalledTimes(4);
});
});
describe('Manual Configuration Caching', () => {
it('should cache manual configuration and exhibit same stale cache issue', async () => {
const provider1029: OidcProvider = {
id: 'manual-provider',
name: 'Manual Provider',
clientId: 'client-id',
clientSecret: 'secret',
issuer: '',
authorizationEndpoint: 'http://localhost:1029/auth',
tokenEndpoint: 'http://localhost:1029/token',
scopes: ['openid'],
authorizationRules: [],
};
const provider1030: OidcProvider = {
...provider1029,
authorizationEndpoint: 'http://localhost:1030/auth',
tokenEndpoint: 'http://localhost:1030/token',
};
// Mock the client.Configuration constructor for manual configs
const mockManualConfig1029 = createMockConfiguration(1029);
const mockManualConfig1030 = createMockConfiguration(1030);
let configCallCount = 0;
vi.mocked(client.Configuration).mockImplementation(() => {
configCallCount++;
return configCallCount === 1 ? mockManualConfig1029 : mockManualConfig1030;
});
vi.mocked(client.ClientSecretPost).mockReturnValue({} as any);
vi.mocked(client.allowInsecureRequests).mockImplementation(() => {});
// First call with port 1029
const config1 = await service.getOrCreateConfig(provider1029);
expect(config1.serverMetadata().authorization_endpoint).toBe('http://localhost:1029/auth');
// Update to port 1030 without clearing cache
const config2 = await service.getOrCreateConfig(provider1030);
// BUG: Still returns cached config with port 1029
expect(config2.serverMetadata().authorization_endpoint).toBe('http://localhost:1029/auth');
// Clear cache and try again
service.clearCache(provider1030.id);
const config3 = await service.getOrCreateConfig(provider1030);
// Now it should return the updated config
expect(config3.serverMetadata().authorization_endpoint).toBe('http://localhost:1030/auth');
});
});
});

View File

@@ -1,11 +1,11 @@
import { forwardRef, Module } from '@nestjs/common';
import { Module } from '@nestjs/common';
import { OidcClientConfigService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client-config.service.js';
import { OidcRedirectUriService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-redirect-uri.service.js';
import { OidcBaseModule } from '@app/unraid-api/graph/resolvers/sso/core/oidc-base.module.js';
@Module({
imports: [forwardRef(() => OidcBaseModule)],
imports: [OidcBaseModule],
providers: [OidcClientConfigService, OidcRedirectUriService],
exports: [OidcClientConfigService, OidcRedirectUriService],
})

View File

@@ -1,13 +1,12 @@
import { forwardRef, Module } from '@nestjs/common';
import { Module } from '@nestjs/common';
import { UserSettingsModule } from '@unraid/shared/services/user-settings.js';
import { OidcClientModule } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client.module.js';
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/oidc-config.service.js';
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
@Module({
imports: [UserSettingsModule, forwardRef(() => OidcClientModule)],
imports: [UserSettingsModule],
providers: [OidcConfigPersistence, OidcValidationService],
exports: [OidcConfigPersistence, OidcValidationService],
})

View File

@@ -1,276 +0,0 @@
import { ConfigService } from '@nestjs/config';
import { Test } from '@nestjs/testing';
import * as fs from 'fs/promises';
import { UserSettingsService } from '@unraid/shared/services/user-settings.js';
import * as client from 'openid-client';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { OidcClientConfigService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client-config.service.js';
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/oidc-config.service.js';
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';
vi.mock('openid-client');
vi.mock('fs/promises', () => ({
writeFile: vi.fn().mockResolvedValue(undefined),
mkdir: vi.fn().mockResolvedValue(undefined),
stat: vi.fn().mockRejectedValue(new Error('File not found')),
}));
describe('OIDC Config Cache Fix - Integration Test', () => {
let configPersistence: OidcConfigPersistence;
let clientConfigService: OidcClientConfigService;
let mockConfigService: any;
afterEach(() => {
delete process.env.PATHS_CONFIG;
});
const createMockProvider = (port: number): OidcProvider => ({
id: 'test-provider',
name: 'Test Provider',
clientId: 'test-client-id',
clientSecret: 'test-secret',
issuer: `http://localhost:${port}`,
scopes: ['openid', 'profile', 'email'],
authorizationRules: [
{
claim: 'email',
operator: 'endsWith' as any,
value: ['@example.com'],
},
],
});
const createMockConfiguration = (port: number) => {
const mockConfig = {
serverMetadata: vi.fn(() => ({
issuer: `http://localhost:${port}`,
authorization_endpoint: `http://localhost:${port}/auth`,
token_endpoint: `http://localhost:${port}/token`,
jwks_uri: `http://localhost:${port}/jwks`,
userinfo_endpoint: `http://localhost:${port}/userinfo`,
})),
};
return mockConfig as unknown as client.Configuration;
};
beforeEach(async () => {
vi.clearAllMocks();
// Set environment variable for config path
process.env.PATHS_CONFIG = '/tmp/test-config';
mockConfigService = {
get: vi.fn((key: string) => {
if (key === 'oidc') {
return {
providers: [createMockProvider(1029)],
defaultAllowedOrigins: [],
};
}
if (key === 'paths.config') {
return '/tmp/test-config';
}
return undefined;
}),
set: vi.fn(),
getOrThrow: vi.fn((key: string) => {
if (key === 'paths.config' || key === 'paths') {
return '/tmp/test-config';
}
return '/tmp/test-config';
}),
};
const mockUserSettingsService = {
register: vi.fn(),
getAllSettings: vi.fn(),
getAllValues: vi.fn(),
updateNamespacedValues: vi.fn(),
};
const module = await Test.createTestingModule({
providers: [
OidcConfigPersistence,
OidcClientConfigService,
OidcValidationService,
{
provide: ConfigService,
useValue: mockConfigService,
},
{
provide: UserSettingsService,
useValue: mockUserSettingsService,
},
],
}).compile();
configPersistence = module.get<OidcConfigPersistence>(OidcConfigPersistence);
clientConfigService = module.get<OidcClientConfigService>(OidcClientConfigService);
// Mock the persist method since we don't want to write to disk in tests
vi.spyOn(configPersistence as any, 'persist').mockResolvedValue(undefined);
});
describe('Cache clearing on provider update', () => {
it('should clear cache when provider is updated via upsertProvider', async () => {
const provider1029 = createMockProvider(1029);
const provider1030 = createMockProvider(1030);
const mockConfig1029 = createMockConfiguration(1029);
const mockConfig1030 = createMockConfiguration(1030);
// Mock validation service to return configs
const validationService = (configPersistence as any).validationService;
vi.spyOn(validationService, 'performDiscovery')
.mockResolvedValueOnce(mockConfig1029)
.mockResolvedValueOnce(mockConfig1030);
// First, get config for port 1029 - this caches it
const config1 = await clientConfigService.getOrCreateConfig(provider1029);
expect(config1.serverMetadata().issuer).toBe('http://localhost:1029');
// Spy on clearCache method
const clearCacheSpy = vi.spyOn(clientConfigService, 'clearCache');
// Update the provider to port 1030 via upsertProvider
await configPersistence.upsertProvider(provider1030);
// Verify cache was cleared for this specific provider
expect(clearCacheSpy).toHaveBeenCalledWith(provider1030.id);
// Now get config again - should fetch fresh config for port 1030
const config2 = await clientConfigService.getOrCreateConfig(provider1030);
expect(config2.serverMetadata().issuer).toBe('http://localhost:1030');
expect(config2.serverMetadata().authorization_endpoint).toBe('http://localhost:1030/auth');
// Verify discovery was called twice (not using cache)
expect(validationService.performDiscovery).toHaveBeenCalledTimes(2);
});
it('should clear cache when provider is deleted', async () => {
const provider = createMockProvider(1029);
const mockConfig = createMockConfiguration(1029);
// Setup initial provider in config
mockConfigService.get.mockReturnValue({
providers: [provider, { ...provider, id: 'other-provider' }],
defaultAllowedOrigins: [],
});
// Mock validation service
const validationService = (configPersistence as any).validationService;
vi.spyOn(validationService, 'performDiscovery').mockResolvedValue(mockConfig);
// First, cache the provider config
await clientConfigService.getOrCreateConfig(provider);
// Spy on clearCache
const clearCacheSpy = vi.spyOn(clientConfigService, 'clearCache');
// Delete the provider
const deleted = await configPersistence.deleteProvider(provider.id);
expect(deleted).toBe(true);
// Verify cache was cleared for the deleted provider
expect(clearCacheSpy).toHaveBeenCalledWith(provider.id);
});
it('should clear all provider caches when updated via settings updateValues', async () => {
// This simulates what happens when settings are saved through the UI
const settingsCallback = (configPersistence as any).userSettings.register.mock.calls[0][1];
const newConfig = {
providers: [
{
...createMockProvider(1030),
authorizationMode: 'simple',
simpleAuthorization: {
allowedDomains: ['example.com'],
allowedEmails: [],
allowedUserIds: [],
},
},
],
defaultAllowedOrigins: [],
};
// Spy on clearCache
const clearCacheSpy = vi.spyOn(clientConfigService, 'clearCache');
// Mock validation
const validationService = (configPersistence as any).validationService;
vi.spyOn(validationService, 'validateProvider').mockResolvedValue({
isValid: true,
});
// Call the updateValues function (simulating saving settings from UI)
await settingsCallback.updateValues(newConfig);
// Verify cache was cleared (called without arguments to clear all)
expect(clearCacheSpy).toHaveBeenCalledWith();
});
it('should NOT require API restart after updating provider issuer', async () => {
// This test confirms that the fix eliminates the need for API restart
const settingsCallback = (configPersistence as any).userSettings.register.mock.calls[0][1];
const newConfig = {
providers: [createMockProvider(1030)],
defaultAllowedOrigins: [],
};
// Mock validation
const validationService = (configPersistence as any).validationService;
vi.spyOn(validationService, 'validateProvider').mockResolvedValue({
isValid: true,
});
// Update settings
const result = await settingsCallback.updateValues(newConfig);
// Verify that restartRequired is false
expect(result.restartRequired).toBe(false);
});
});
describe('Provider validation on save', () => {
it('should validate providers and include warnings but still save', async () => {
const settingsCallback = (configPersistence as any).userSettings.register.mock.calls[0][1];
const newConfig = {
providers: [
createMockProvider(1030),
{ ...createMockProvider(1031), id: 'invalid-provider', name: 'Invalid Provider' },
],
defaultAllowedOrigins: [],
};
// Mock validation - first provider valid, second invalid
const validationService = (configPersistence as any).validationService;
vi.spyOn(validationService, 'validateProvider')
.mockResolvedValueOnce({ isValid: true })
.mockResolvedValueOnce({
isValid: false,
error: 'Discovery failed: Unable to reach issuer',
});
// Update settings
const result = await settingsCallback.updateValues(newConfig);
// Should save successfully but include warnings
expect(result.restartRequired).toBe(false);
expect(result.warnings).toBeDefined();
expect(result.warnings).toContain(
'❌ Invalid Provider: Discovery failed: Unable to reach issuer'
);
expect(result.values.providers).toHaveLength(2);
// Cache should still be cleared even with validation warnings
const clearCacheSpy = vi.spyOn(clientConfigService, 'clearCache');
await settingsCallback.updateValues(newConfig);
expect(clearCacheSpy).toHaveBeenCalled();
});
});
});

View File

@@ -1,4 +1,4 @@
import { forwardRef, Inject, Injectable, Optional } from '@nestjs/common';
import { Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { RuleEffect } from '@jsonforms/core';
@@ -6,7 +6,6 @@ import { mergeSettingSlices } from '@unraid/shared/jsonforms/settings.js';
import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';
import { UserSettingsService } from '@unraid/shared/services/user-settings.js';
import { OidcClientConfigService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client-config.service.js';
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
import {
AuthorizationOperator,
@@ -31,10 +30,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
constructor(
configService: ConfigService,
private readonly userSettings: UserSettingsService,
private readonly validationService: OidcValidationService,
@Optional()
@Inject(forwardRef(() => OidcClientConfigService))
private readonly clientConfigService?: OidcClientConfigService
private readonly validationService: OidcValidationService
) {
super(configService);
this.registerSettings();
@@ -256,15 +252,6 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
this.configService.set(this.configKey(), newConfig);
await this.persist(newConfig);
// Clear the OIDC client configuration cache when a provider is updated
// This ensures the new issuer/endpoints are used immediately
if (this.clientConfigService) {
this.clientConfigService.clearCache(cleanedProvider.id);
this.logger.debug(
`Cleared OIDC client configuration cache for provider ${cleanedProvider.id}`
);
}
return cleanedProvider;
}
@@ -341,12 +328,6 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
this.configService.set(this.configKey(), newConfig);
await this.persist(newConfig);
// Clear the cache for the deleted provider
if (this.clientConfigService) {
this.clientConfigService.clearCache(id);
this.logger.debug(`Cleared OIDC client configuration cache for deleted provider ${id}`);
}
return true;
}
@@ -459,13 +440,6 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
this.configService.set(this.configKey(), processedConfig);
await this.persist(processedConfig);
// Clear the OIDC client configuration cache to ensure fresh discovery
// This fixes the issue where changing issuer URLs requires API restart
if (this.clientConfigService) {
this.clientConfigService.clearCache();
this.logger.debug('Cleared OIDC client configuration cache after provider update');
}
// Include validation results in response
const response: { restartRequired: boolean; values: OidcConfig; warnings?: string[] } = {
restartRequired: false,

View File

@@ -1,149 +0,0 @@
import { Logger } from '@nestjs/common';
import { beforeEach, describe, expect, it, vi } from 'vitest';
import * as getUnraidVersionModule from '@app/common/dashboard/get-unraid-version.js';
import { FileModification } from '@app/unraid-api/unraid-file-modifier/file-modification.js';
vi.mock('@app/common/dashboard/get-unraid-version.js');
class TestFileModification extends FileModification {
id = 'test';
filePath = '/test/file';
protected async generatePatch(): Promise<string> {
return 'test patch';
}
}
describe('FileModification', () => {
let modification: TestFileModification;
let getUnraidVersionMock: any;
beforeEach(() => {
vi.clearAllMocks();
const logger = new Logger('TestFileModification');
modification = new TestFileModification(logger);
getUnraidVersionMock = vi.mocked(getUnraidVersionModule.getUnraidVersion);
});
describe('version comparison methods', () => {
describe('isUnraidVersionGreaterThanOrEqualTo', () => {
it('should return true when current version is greater', async () => {
getUnraidVersionMock.mockResolvedValue('7.3.0');
const result = await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0');
expect(result).toBe(true);
});
it('should return true when current version is equal', async () => {
getUnraidVersionMock.mockResolvedValue('7.2.0');
const result = await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0');
expect(result).toBe(true);
});
it('should return false when current version is less', async () => {
getUnraidVersionMock.mockResolvedValue('7.1.0');
const result = await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0');
expect(result).toBe(false);
});
it('should handle prerelease versions correctly', async () => {
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.1');
const result = await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0-beta.1');
expect(result).toBe(true);
});
it('should treat prerelease as greater than stable when base versions are equal', async () => {
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.1');
const result = await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0', {
includePrerelease: true,
});
expect(result).toBe(true);
});
it('should compare prerelease versions correctly', async () => {
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.2.4');
const result =
await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0-beta.2.3');
expect(result).toBe(true);
});
it('should handle beta.2.3 being less than beta.2.4', async () => {
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.2.3');
const result =
await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0-beta.2.4');
expect(result).toBe(false);
});
});
describe('isUnraidVersionLessThanOrEqualTo', () => {
it('should return true when current version is less', async () => {
getUnraidVersionMock.mockResolvedValue('7.1.0');
const result = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0');
expect(result).toBe(true);
});
it('should return true when current version is equal', async () => {
getUnraidVersionMock.mockResolvedValue('7.2.0');
const result = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0');
expect(result).toBe(true);
});
it('should return false when current version is greater', async () => {
getUnraidVersionMock.mockResolvedValue('7.3.0');
const result = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0');
expect(result).toBe(false);
});
it('should handle prerelease versions correctly', async () => {
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.1');
const result = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0-beta.1');
expect(result).toBe(true);
});
it('should treat prerelease as less than stable when base versions are equal', async () => {
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.1');
const result = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0', {
includePrerelease: true,
});
expect(result).toBe(false);
});
it('should compare prerelease versions correctly', async () => {
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.2.3');
const result = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0-beta.2.4');
expect(result).toBe(true);
});
it('should handle beta.2.3 being equal to beta.2.3', async () => {
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.2.3');
const result = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0-beta.2.3');
expect(result).toBe(true);
});
it('should handle beta.2.4 being greater than beta.2.3', async () => {
getUnraidVersionMock.mockResolvedValue('7.2.0-beta.2.4');
const result = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0-beta.2.3');
expect(result).toBe(false);
});
});
describe('inverse relationship', () => {
it('should have opposite results for greater-than-or-equal and less-than-or-equal when not equal', async () => {
getUnraidVersionMock.mockResolvedValue('7.2.5');
const gte = await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0');
const lte = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0');
expect(gte).toBe(true);
expect(lte).toBe(false);
});
it('should both return true when versions are equal', async () => {
getUnraidVersionMock.mockResolvedValue('7.2.0');
const gte = await modification['isUnraidVersionGreaterThanOrEqualTo']('7.2.0');
const lte = await modification['isUnraidVersionLessThanOrEqualTo']('7.2.0');
expect(gte).toBe(true);
expect(lte).toBe(true);
});
});
});
});

View File

@@ -5,7 +5,7 @@ import { access, readFile, unlink, writeFile } from 'fs/promises';
import { basename, dirname, join } from 'path';
import { applyPatch, createPatch, parsePatch, reversePatch } from 'diff';
import { coerce, compare, gte, lte } from 'semver';
import { coerce, compare, gte } from 'semver';
import { getUnraidVersion } from '@app/common/dashboard/get-unraid-version.js';
@@ -259,53 +259,29 @@ export abstract class FileModification {
return patch;
}
private async compareUnraidVersion(
version: string,
compareFn: typeof gte | typeof lte,
protected async isUnraidVersionGreaterThanOrEqualTo(
version: string = '7.2.0', // Defaults to the version of Unraid that includes the API by default
{ includePrerelease = true }: { includePrerelease?: boolean } = {}
): Promise<boolean> {
const unraidVersion = coerce(await getUnraidVersion(), { includePrerelease });
const comparedVersion = coerce(version, { includePrerelease });
if (!unraidVersion) {
throw new Error(`Failed to compare Unraid version - missing unraid version`);
}
if (!comparedVersion) {
throw new Error(`Failed to compare Unraid version - missing comparison version`);
}
// Special handling for prerelease versions when base versions are equal
// If includePrerelease and base versions are equal, treat prerelease as greater
if (includePrerelease) {
const baseUnraid = `${unraidVersion.major}.${unraidVersion.minor}.${unraidVersion.patch}`;
const baseCompared = `${comparedVersion.major}.${comparedVersion.minor}.${comparedVersion.patch}`;
if (baseUnraid === baseCompared) {
const unraidHasPrerelease = unraidVersion.prerelease.length > 0;
const comparedHasPrerelease = comparedVersion.prerelease.length > 0;
// If one has prerelease and the other doesn't, handle specially
if (unraidHasPrerelease && !comparedHasPrerelease) {
// For gte: prerelease is considered greater than stable
// For lte: prerelease is considered less than stable
return compareFn === gte;
// If unraidVersion has prerelease and comparedVersion does not, treat as greater
if (unraidVersion.prerelease.length && !comparedVersion.prerelease.length) {
return true;
}
}
}
return compareFn(unraidVersion, comparedVersion);
}
protected async isUnraidVersionGreaterThanOrEqualTo(
version: string = '7.2.0', // Defaults to the version of Unraid that includes the API by default
{ includePrerelease = true }: { includePrerelease?: boolean } = {}
): Promise<boolean> {
return this.compareUnraidVersion(version, gte, { includePrerelease });
}
protected async isUnraidVersionLessThanOrEqualTo(
version: string,
{ includePrerelease = true }: { includePrerelease?: boolean } = {}
): Promise<boolean> {
return this.compareUnraidVersion(version, lte, { includePrerelease });
return gte(unraidVersion, comparedVersion);
}
}

View File

@@ -1,334 +0,0 @@
Menu="UserPreferences"
Title="Display Settings"
Icon="icon-display"
Tag="desktop"
---
<?PHP
/* Copyright 2005-2025, Lime Technology
* Copyright 2012-2025, Bergware International.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License version 2,
* as published by the Free Software Foundation.
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*/
?>
<?
$void = "<img src='/webGui/images/banner.png' id='image' width='330' height='30' onclick='$(&quot;#drop&quot;).click()' style='cursor:pointer' title='_(Click to select PNG file)_'>";
$icon = "<i class='fa fa-trash top' title='_(Restore default image)_' onclick='restore()'></i>";
$plugins = '/var/log/plugins';
require_once "$docroot/plugins/dynamix.plugin.manager/include/PluginHelpers.php";
?>
<script src="<?autov('/webGui/javascript/jquery.filedrop.js')?>"></script>
<script>
var path = '/boot/config/plugins/dynamix';
var filename = '';
var locale = "<?=$locale?>";
function restore() {
// restore original image and activate APPLY button
$('#dropbox').html("<?=$void?>");
$('select[name="banner"]').trigger('change');
filename = 'reset';
}
function upload(lang) {
// save or delete upload when APPLY is pressed
if (filename=='reset') {
$.post("/webGui/include/FileUpload.php",{cmd:'delete',path:path,filename:'banner.png'});
} else if (filename) {
$.post("/webGui/include/FileUpload.php",{cmd:'save',path:path,filename:filename,output:'banner.png'});
}
// reset dashboard tiles when switching language
if (lang != locale) {
$.removeCookie('db-box1');
$.removeCookie('db-box2');
$.removeCookie('db-box3');
$.removeCookie('inactive_content');
$.removeCookie('hidden_content');
}
}
function presetBanner(form) {
if (form.banner.selectedIndex == 0) $('.js-bannerSettings').hide(); else $('.js-bannerSettings').show();
}
function presetRefresh(form) {
for (var i=0,item; item=form.refresh.options[i]; i++) item.value *= -1;
}
function presetPassive(index) {
if (index==0) $('#passive').hide(); else $('#passive').show();
}
function updateDirection(lang) {
// var rtl = ['ar_AR','fa_FA'].includes(lang) ? "dir='rtl' " : "";
// RTL display is not giving the desired results, we keep LTR
var rtl = "";
$('input[name="rtl"]').val(rtl);
}
$(function() {
var dropbox = $('#dropbox');
// attach the drag-n-drop feature to the 'dropbox' element
dropbox.filedrop({
maxfiles:1,
maxfilesize:512, // KB
data: {"csrf_token": "<?=$var['csrf_token']?>"},
url:'/webGui/include/FileUpload.php',
beforeEach:function(file) {
if (!file.type.match(/^image\/.*/)) {
swal({title:"_(Warning)_",text:"_(Only PNG images are allowed)_!",type:"warning",html:true,confirmButtonText:"_(Ok)_"});
return false;
}
},
error: function(err, file, i) {
switch (err) {
case 'BrowserNotSupported':
swal({title:"_(Browser error)_",text:"_(Your browser does not support HTML5 file uploads)_!",type:"error",html:true,confirmButtonText:"_(Ok)_"});
break;
case 'TooManyFiles':
swal({title:"_(Too many files)_",text:"_(Please select one file only)_!",html:true,type:"error"});
break;
case 'FileTooLarge':
swal({title:"_(File too large)_",text:"_(Maximum file upload size is 512K)_ (524,288 _(bytes)_)",type:"error",html:true,confirmButtonText:"_(Ok)_"});
break;
}
},
uploadStarted:function(i,file,count) {
var image = $('img', $(dropbox));
var reader = new FileReader();
image.width = 330;
image.height = 30;
reader.onload = function(e){image.attr('src',e.target.result);};
reader.readAsDataURL(file);
},
uploadFinished:function(i,file,response) {
if (response == 'OK 200') {
if (!filename || filename=='reset') $(dropbox).append("<?=$icon?>");
$('select[name="banner"]').trigger('change');
filename = file.name;
} else {
swal({title:"_(Upload error)_",text:response,type:"error",html:true,confirmButtonText:"_(Ok)_"});
}
}
});
// simulate a drop action when manual file selection is done
$('#drop').bind('change', function(e) {
var files = e.target.files;
if ($('#dropbox').triggerHandler({type:'drop',dataTransfer:{files:files}})==false) e.stopImmediatePropagation();
});
presetBanner(document.display_settings);
});
</script>
:display_settings_help:
<form markdown="1" name="display_settings" method="POST" action="/update.php" target="progressFrame" onsubmit="upload(this.locale.value)">
<input type="hidden" name="#file" value="dynamix/dynamix.cfg">
<input type="hidden" name="#section" value="display">
<input type="hidden" name="rtl" value="<?=$display['rtl']?>">
_(Display width)_:
: <select name="width">
<?=mk_option($display['width'], "",_('Boxed'))?>
<?=mk_option($display['width'], "1",_('Unlimited'))?>
</select>
:display_width_help:
_(Language)_:
: <select name="locale" class="fixed" onchange="updateDirection(this.value)">
<?echo mk_option($display['locale'], "","English");
foreach (glob("$plugins/lang-*.xml",GLOB_NOSORT) as $xml_file) {
$lang = language('Language', $xml_file);
$home = language('LanguageLocal', $xml_file);
$name = language('LanguagePack', $xml_file);
echo mk_option($display['locale'], $name, "$home ($lang)");
}
?></select>
_(Font size)_:
: <select name="font" id='font'>
<?=mk_option($display['font'], "50",_('Very small'))?>
<?=mk_option($display['font'], "56.25",_('Small'))?>
<?=mk_option($display['font'], "",_('Normal'))?>
<?=mk_option($display['font'], "68.75",_('Large'))?>
<?=mk_option($display['font'], "75",_('Very large'))?>
<?=mk_option($display['font'], "80",_('Huge'))?>
</select>
:display_font_size_help:
_(Terminal font size)_:
: <select name="tty" id="tty">
<?=mk_option($display['tty'], "11",_('Very small'))?>
<?=mk_option($display['tty'], "13",_('Small'))?>
<?=mk_option($display['tty'], "15",_('Normal'))?>
<?=mk_option($display['tty'], "17",_('Large'))?>
<?=mk_option($display['tty'], "19",_('Very large'))?>
<?=mk_option($display['tty'], "21",_('Huge'))?>
</select>
:display_tty_size_help:
_(Number format)_:
: <select name="number">
<?=mk_option($display['number'], ".,",_('[D] dot : [G] comma'))?>
<?=mk_option($display['number'], ". ",_('[D] dot : [G] space'))?>
<?=mk_option($display['number'], ".",_('[D] dot : [G] none'))?>
<?=mk_option($display['number'], ",.",_('[D] comma : [G] dot'))?>
<?=mk_option($display['number'], ", ",_('[D] comma : [G] space'))?>
<?=mk_option($display['number'], ",",_('[D] comma : [G] none'))?>
</select>
_(Number scaling)_:
: <select name="scale">
<?=mk_option($display['scale'], "-1",_('Automatic'))?>
<?=mk_option($display['scale'], "0",_('Disabled'))?>
<?=mk_option($display['scale'], "1",_('KB'))?>
<?=mk_option($display['scale'], "2",_('MB'))?>
<?=mk_option($display['scale'], "3",_('GB'))?>
<?=mk_option($display['scale'], "4",_('TB'))?>
<?=mk_option($display['scale'], "5",_('PB'))?>
</select>
_(Page view)_:
: <select name="tabs">
<?=mk_option($display['tabs'], "0",_('Tabbed'))?>
<?=mk_option($display['tabs'], "1",_('Non-tabbed'))?>
</select>
:display_page_view_help:
_(Placement of Users menu)_:
: <select name="users">
<?=mk_option($display['users'], "Tasks:3",_('Header menu'))?>
<?=mk_option($display['users'], "UserPreferences",_('Settings menu'))?>
</select>
:display_users_menu_help:
_(Listing height)_:
: <select name="resize">
<?=mk_option($display['resize'], "0",_('Automatic'))?>
<?=mk_option($display['resize'], "1",_('Fixed'))?>
</select>
:display_listing_height_help:
_(Display device name)_:
: <select name="raw">
<?=mk_option($display['raw'], "",_('Normalized'))?>
<?=mk_option($display['raw'], "1",_('Raw'))?>
</select>
_(Display world-wide-name in device ID)_:
: <select name="wwn">
<?=mk_option($display['wwn'], "0",_('Disabled'))?>
<?=mk_option($display['wwn'], "1",_('Automatic'))?>
</select>
:display_wwn_device_id_help:
_(Display array totals)_:
: <select name="total">
<?=mk_option($display['total'], "0",_('No'))?>
<?=mk_option($display['total'], "1",_('Yes'))?>
</select>
_(Show array utilization indicator)_:
: <select name="usage">
<?=mk_option($display['usage'], "0",_('No'))?>
<?=mk_option($display['usage'], "1",_('Yes'))?>
</select>
_(Temperature unit)_:
: <select name="unit">
<?=mk_option($display['unit'], "C",_('Celsius'))?>
<?=mk_option($display['unit'], "F",_('Fahrenheit'))?>
</select>
:display_temperature_unit_help:
_(Dynamix color theme)_:
: <select name="theme">
<?foreach (glob("$docroot/webGui/styles/themes/*.css") as $themes):?>
<?$theme = basename($themes, '.css');?>
<?=mk_option($display['theme'], $theme, _(ucfirst($theme)))?>
<?endforeach;?>
</select>
_(Used / Free columns)_:
: <select name="text">
<?=mk_option($display['text'], "0",_('Text'))?>
<?=mk_option($display['text'], "1",_('Bar (gray)'))?>
<?=mk_option($display['text'], "2",_('Bar (color)'))?>
<?=mk_option($display['text'], "10",_('Text - Bar (gray)'))?>
<?=mk_option($display['text'], "20",_('Text - Bar (color)'))?>
<?=mk_option($display['text'], "11",_('Bar (gray) - Text'))?>
<?=mk_option($display['text'], "21",_('Bar (color) - Text'))?>
</select>
_(Header custom text color)_:
: <input type="text" class="narrow" name="header" value="<?=$display['header']?>" maxlength="6" pattern="([0-9a-fA-F]{3}){1,2}" title="_(HTML color code of 3 or 6 hexadecimal digits)_">
:display_custom_text_color_help:
_(Header custom secondary text color)_:
: <input type="text" class="narrow" name="headermetacolor" value="<?=$display['headermetacolor']?>" maxlength="6" pattern="([0-9a-fA-F]{3}){1,2}" title="_(HTML color code of 3 or 6 hexadecimal digits)_">
_(Header custom background color)_:
: <input type="text" class="narrow" name="background" value="<?=$display['background']?>" maxlength="6" pattern="([0-9a-fA-F]{3}){1,2}" title="_(HTML color code of 3 or 6 hexadecimal digits)_">
:display_custom_background_color_help:
_(Header show description)_:
: <select name="headerdescription">
<?=mk_option($display['headerdescription'], "yes",_('Yes'))?>
<?=mk_option($display['headerdescription'], "no",_('No'))?>
</select>
_(Show banner)_:
: <select name="banner" onchange="presetBanner(this.form)">
<?=mk_option($display['banner'], "",_('No'))?>
<?=mk_option($display['banner'], "image",_('Yes'))?>
</select>
<div class="js-bannerSettings" markdown="1" style="display:none">
_(Custom banner)_:
<input type="hidden" name="#custom" value="">
: <span id="dropbox">
<?if (file_exists($banner)):?>
<img src="<?=autov($banner)?>" width="330" height="30" onclick="$('#drop').click()" style="cursor:pointer" title="_(Click to select PNG file)_"><?=$icon?>
<?else:?>
<?=$void?>
<?endif;?>
</span><em>_(Drag-n-drop a PNG file or click the image at the left)_.</em><input type="file" id="drop" accept="image/*" style="display:none">
:display_custom_banner_help:
</div>
<div class="js-bannerSettings" markdown="1" style="display:none">
_(Show banner background color fade)_:
: <select name="showBannerGradient">
<?=mk_option($display['showBannerGradient'], "no",_('No'))?>
<?=mk_option($display['showBannerGradient'], "yes",_('Yes'))?>
</select>
</div>
_(Favorites enabled)_:
: <select name="favorites">
<?=mk_option($display['favorites'], "yes",_('Yes'))?>
<?=mk_option($display['favorites'], "no",_('No'))?>
</select>
:display_favorites_enabled_help:
_(Allow realtime updates on inactive browsers)_:
: <select name='liveUpdate'>
<?=mk_option($display['liveUpdate'],"no",_('No'))?>
<?=mk_option($display['liveUpdate'],"yes",_('Yes'))?>
</select>
<input type="submit" name="#default" value="_(Default)_" onclick="filename='reset'">
: <input type="submit" name="#apply" value="_(Apply)_" disabled><input type="button" value="_(Done)_" onclick="done()">
</form>

View File

@@ -8,7 +8,6 @@ import { describe, expect, test, vi } from 'vitest';
import { FileModification } from '@app/unraid-api/unraid-file-modifier/file-modification.js';
import AuthRequestModification from '@app/unraid-api/unraid-file-modifier/modifications/auth-request.modification.js';
import DefaultPageLayoutModification from '@app/unraid-api/unraid-file-modifier/modifications/default-page-layout.modification.js';
import DisplaySettingsModification from '@app/unraid-api/unraid-file-modifier/modifications/display-settings.modification.js';
import NotificationsPageModification from '@app/unraid-api/unraid-file-modifier/modifications/notifications-page.modification.js';
import RcNginxModification from '@app/unraid-api/unraid-file-modifier/modifications/rc-nginx.modification.js';
import SSOFileModification from '@app/unraid-api/unraid-file-modifier/modifications/sso.modification.js';
@@ -36,12 +35,6 @@ const patchTestCases: ModificationTestCase[] = [
'https://raw.githubusercontent.com/unraid/webgui/refs/heads/7.1/emhttp/plugins/dynamix/Notifications.page',
fileName: 'Notifications.page',
},
{
ModificationClass: DisplaySettingsModification,
fileUrl:
'https://raw.githubusercontent.com/unraid/webgui/refs/heads/7.1/emhttp/plugins/dynamix/DisplaySettings.page',
fileName: 'DisplaySettings.page',
},
{
ModificationClass: SSOFileModification,
fileUrl:

View File

@@ -1,334 +0,0 @@
Menu="UserPreferences"
Title="Display Settings"
Icon="icon-display"
Tag="desktop"
---
<?PHP
/* Copyright 2005-2025, Lime Technology
* Copyright 2012-2025, Bergware International.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License version 2,
* as published by the Free Software Foundation.
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*/
?>
<?
$void = "<img src='/webGui/images/banner.png' id='image' width='330' height='30' onclick='$(&quot;#drop&quot;).click()' style='cursor:pointer' title='_(Click to select PNG file)_'>";
$icon = "<i class='fa fa-trash top' title='_(Restore default image)_' onclick='restore()'></i>";
$plugins = '/var/log/plugins';
require_once "$docroot/plugins/dynamix.plugin.manager/include/PluginHelpers.php";
?>
<script src="<?autov('/webGui/javascript/jquery.filedrop.js')?>"></script>
<script>
var path = '/boot/config/plugins/dynamix';
var filename = '';
var locale = "<?=$locale?>";
function restore() {
// restore original image and activate APPLY button
$('#dropbox').html("<?=$void?>");
$('select[name="banner"]').trigger('change');
filename = 'reset';
}
function upload(lang) {
// save or delete upload when APPLY is pressed
if (filename=='reset') {
$.post("/webGui/include/FileUpload.php",{cmd:'delete',path:path,filename:'banner.png'});
} else if (filename) {
$.post("/webGui/include/FileUpload.php",{cmd:'save',path:path,filename:filename,output:'banner.png'});
}
// reset dashboard tiles when switching language
if (lang != locale) {
$.removeCookie('db-box1');
$.removeCookie('db-box2');
$.removeCookie('db-box3');
$.removeCookie('inactive_content');
$.removeCookie('hidden_content');
}
}
function presetBanner(form) {
if (form.banner.selectedIndex == 0) $('.js-bannerSettings').hide(); else $('.js-bannerSettings').show();
}
function presetRefresh(form) {
for (var i=0,item; item=form.refresh.options[i]; i++) item.value *= -1;
}
function presetPassive(index) {
if (index==0) $('#passive').hide(); else $('#passive').show();
}
function updateDirection(lang) {
// var rtl = ['ar_AR','fa_FA'].includes(lang) ? "dir='rtl' " : "";
// RTL display is not giving the desired results, we keep LTR
var rtl = "";
$('input[name="rtl"]').val(rtl);
}
$(function() {
var dropbox = $('#dropbox');
// attach the drag-n-drop feature to the 'dropbox' element
dropbox.filedrop({
maxfiles:1,
maxfilesize:512, // KB
data: {"csrf_token": "<?=$var['csrf_token']?>"},
url:'/webGui/include/FileUpload.php',
beforeEach:function(file) {
if (!file.type.match(/^image\/.*/)) {
swal({title:"_(Warning)_",text:"_(Only PNG images are allowed)_!",type:"warning",html:true,confirmButtonText:"_(Ok)_"});
return false;
}
},
error: function(err, file, i) {
switch (err) {
case 'BrowserNotSupported':
swal({title:"_(Browser error)_",text:"_(Your browser does not support HTML5 file uploads)_!",type:"error",html:true,confirmButtonText:"_(Ok)_"});
break;
case 'TooManyFiles':
swal({title:"_(Too many files)_",text:"_(Please select one file only)_!",html:true,type:"error"});
break;
case 'FileTooLarge':
swal({title:"_(File too large)_",text:"_(Maximum file upload size is 512K)_ (524,288 _(bytes)_)",type:"error",html:true,confirmButtonText:"_(Ok)_"});
break;
}
},
uploadStarted:function(i,file,count) {
var image = $('img', $(dropbox));
var reader = new FileReader();
image.width = 330;
image.height = 30;
reader.onload = function(e){image.attr('src',e.target.result);};
reader.readAsDataURL(file);
},
uploadFinished:function(i,file,response) {
if (response == 'OK 200') {
if (!filename || filename=='reset') $(dropbox).append("<?=$icon?>");
$('select[name="banner"]').trigger('change');
filename = file.name;
} else {
swal({title:"_(Upload error)_",text:response,type:"error",html:true,confirmButtonText:"_(Ok)_"});
}
}
});
// simulate a drop action when manual file selection is done
$('#drop').bind('change', function(e) {
var files = e.target.files;
if ($('#dropbox').triggerHandler({type:'drop',dataTransfer:{files:files}})==false) e.stopImmediatePropagation();
});
presetBanner(document.display_settings);
});
</script>
:display_settings_help:
<form markdown="1" name="display_settings" method="POST" action="/update.php" target="progressFrame" onsubmit="upload(this.locale.value)">
<input type="hidden" name="#file" value="dynamix/dynamix.cfg">
<input type="hidden" name="#section" value="display">
<input type="hidden" name="rtl" value="<?=$display['rtl']?>">
_(Display width)_:
: <select name="width">
<?=mk_option($display['width'], "",_('Boxed'))?>
<?=mk_option($display['width'], "1",_('Unlimited'))?>
</select>
:display_width_help:
_(Language)_:
: <select name="locale" onchange="updateDirection(this.value)">
<?echo mk_option($display['locale'], "","English");
foreach (glob("$plugins/lang-*.xml",GLOB_NOSORT) as $xml_file) {
$lang = language('Language', $xml_file);
$home = language('LanguageLocal', $xml_file);
$name = language('LanguagePack', $xml_file);
echo mk_option($display['locale'], $name, "$home ($lang)");
}
?></select>
_(Font size)_:
: <select name="font" id='font'>
<?=mk_option($display['font'], "50",_('Very small'))?>
<?=mk_option($display['font'], "56.25",_('Small'))?>
<?=mk_option($display['font'], "",_('Normal'))?>
<?=mk_option($display['font'], "68.75",_('Large'))?>
<?=mk_option($display['font'], "75",_('Very large'))?>
<?=mk_option($display['font'], "80",_('Huge'))?>
</select>
:display_font_size_help:
_(Terminal font size)_:
: <select name="tty" id="tty">
<?=mk_option($display['tty'], "11",_('Very small'))?>
<?=mk_option($display['tty'], "13",_('Small'))?>
<?=mk_option($display['tty'], "15",_('Normal'))?>
<?=mk_option($display['tty'], "17",_('Large'))?>
<?=mk_option($display['tty'], "19",_('Very large'))?>
<?=mk_option($display['tty'], "21",_('Huge'))?>
</select>
:display_tty_size_help:
_(Number format)_:
: <select name="number">
<?=mk_option($display['number'], ".,",_('[D] dot : [G] comma'))?>
<?=mk_option($display['number'], ". ",_('[D] dot : [G] space'))?>
<?=mk_option($display['number'], ".",_('[D] dot : [G] none'))?>
<?=mk_option($display['number'], ",.",_('[D] comma : [G] dot'))?>
<?=mk_option($display['number'], ", ",_('[D] comma : [G] space'))?>
<?=mk_option($display['number'], ",",_('[D] comma : [G] none'))?>
</select>
_(Number scaling)_:
: <select name="scale">
<?=mk_option($display['scale'], "-1",_('Automatic'))?>
<?=mk_option($display['scale'], "0",_('Disabled'))?>
<?=mk_option($display['scale'], "1",_('KB'))?>
<?=mk_option($display['scale'], "2",_('MB'))?>
<?=mk_option($display['scale'], "3",_('GB'))?>
<?=mk_option($display['scale'], "4",_('TB'))?>
<?=mk_option($display['scale'], "5",_('PB'))?>
</select>
_(Page view)_:
: <select name="tabs">
<?=mk_option($display['tabs'], "0",_('Tabbed'))?>
<?=mk_option($display['tabs'], "1",_('Non-tabbed'))?>
</select>
:display_page_view_help:
_(Placement of Users menu)_:
: <select name="users">
<?=mk_option($display['users'], "Tasks:3",_('Header menu'))?>
<?=mk_option($display['users'], "UserPreferences",_('Settings menu'))?>
</select>
:display_users_menu_help:
_(Listing height)_:
: <select name="resize">
<?=mk_option($display['resize'], "0",_('Automatic'))?>
<?=mk_option($display['resize'], "1",_('Fixed'))?>
</select>
:display_listing_height_help:
_(Display device name)_:
: <select name="raw">
<?=mk_option($display['raw'], "",_('Normalized'))?>
<?=mk_option($display['raw'], "1",_('Raw'))?>
</select>
_(Display world-wide-name in device ID)_:
: <select name="wwn">
<?=mk_option($display['wwn'], "0",_('Disabled'))?>
<?=mk_option($display['wwn'], "1",_('Automatic'))?>
</select>
:display_wwn_device_id_help:
_(Display array totals)_:
: <select name="total">
<?=mk_option($display['total'], "0",_('No'))?>
<?=mk_option($display['total'], "1",_('Yes'))?>
</select>
_(Show array utilization indicator)_:
: <select name="usage">
<?=mk_option($display['usage'], "0",_('No'))?>
<?=mk_option($display['usage'], "1",_('Yes'))?>
</select>
_(Temperature unit)_:
: <select name="unit">
<?=mk_option($display['unit'], "C",_('Celsius'))?>
<?=mk_option($display['unit'], "F",_('Fahrenheit'))?>
</select>
:display_temperature_unit_help:
_(Dynamix color theme)_:
: <select name="theme">
<?foreach (glob("$docroot/webGui/styles/themes/*.css") as $themes):?>
<?$theme = basename($themes, '.css');?>
<?=mk_option($display['theme'], $theme, _(ucfirst($theme)))?>
<?endforeach;?>
</select>
_(Used / Free columns)_:
: <select name="text">
<?=mk_option($display['text'], "0",_('Text'))?>
<?=mk_option($display['text'], "1",_('Bar (gray)'))?>
<?=mk_option($display['text'], "2",_('Bar (color)'))?>
<?=mk_option($display['text'], "10",_('Text - Bar (gray)'))?>
<?=mk_option($display['text'], "20",_('Text - Bar (color)'))?>
<?=mk_option($display['text'], "11",_('Bar (gray) - Text'))?>
<?=mk_option($display['text'], "21",_('Bar (color) - Text'))?>
</select>
_(Header custom text color)_:
: <input type="text" class="narrow" name="header" value="<?=$display['header']?>" maxlength="6" pattern="([0-9a-fA-F]{3}){1,2}" title="_(HTML color code of 3 or 6 hexadecimal digits)_">
:display_custom_text_color_help:
_(Header custom secondary text color)_:
: <input type="text" class="narrow" name="headermetacolor" value="<?=$display['headermetacolor']?>" maxlength="6" pattern="([0-9a-fA-F]{3}){1,2}" title="_(HTML color code of 3 or 6 hexadecimal digits)_">
_(Header custom background color)_:
: <input type="text" class="narrow" name="background" value="<?=$display['background']?>" maxlength="6" pattern="([0-9a-fA-F]{3}){1,2}" title="_(HTML color code of 3 or 6 hexadecimal digits)_">
:display_custom_background_color_help:
_(Header show description)_:
: <select name="headerdescription">
<?=mk_option($display['headerdescription'], "yes",_('Yes'))?>
<?=mk_option($display['headerdescription'], "no",_('No'))?>
</select>
_(Show banner)_:
: <select name="banner" onchange="presetBanner(this.form)">
<?=mk_option($display['banner'], "",_('No'))?>
<?=mk_option($display['banner'], "image",_('Yes'))?>
</select>
<div class="js-bannerSettings" markdown="1" style="display:none">
_(Custom banner)_:
<input type="hidden" name="#custom" value="">
: <span id="dropbox">
<?if (file_exists($banner)):?>
<img src="<?=autov($banner)?>" width="330" height="30" onclick="$('#drop').click()" style="cursor:pointer" title="_(Click to select PNG file)_"><?=$icon?>
<?else:?>
<?=$void?>
<?endif;?>
</span><em>_(Drag-n-drop a PNG file or click the image at the left)_.</em><input type="file" id="drop" accept="image/*" style="display:none">
:display_custom_banner_help:
</div>
<div class="js-bannerSettings" markdown="1" style="display:none">
_(Show banner background color fade)_:
: <select name="showBannerGradient">
<?=mk_option($display['showBannerGradient'], "no",_('No'))?>
<?=mk_option($display['showBannerGradient'], "yes",_('Yes'))?>
</select>
</div>
_(Favorites enabled)_:
: <select name="favorites">
<?=mk_option($display['favorites'], "yes",_('Yes'))?>
<?=mk_option($display['favorites'], "no",_('No'))?>
</select>
:display_favorites_enabled_help:
_(Allow realtime updates on inactive browsers)_:
: <select name='liveUpdate'>
<?=mk_option($display['liveUpdate'],"no",_('No'))?>
<?=mk_option($display['liveUpdate'],"yes",_('Yes'))?>
</select>
<input type="submit" name="#default" value="_(Default)_" onclick="filename='reset'">
: <input type="submit" name="#apply" value="_(Apply)_" disabled><input type="button" value="_(Done)_" onclick="done()">
</form>

View File

@@ -14,13 +14,13 @@ export default class AuthRequestModification extends FileModification {
id: string = 'auth-request';
/**
* Get the list of .js and .css files in the given directory
* @param dir - The directory to search for .js and .css files
* @returns The list of .js and .css files in the given directory
* Get the list of .js files in the given directory
* @param dir - The directory to search for .js files
* @returns The list of .js files in the given directory
*/
private getAssetFiles = async (dir: string) => {
private getJsFiles = async (dir: string) => {
const { glob } = await import('glob');
const files = await glob(join(dir, '**/*.{js,css}'));
const files = await glob(join(dir, '**/*.js'));
const baseDir = '/usr/local/emhttp';
return files.map((file) => (file.startsWith(baseDir) ? file.slice(baseDir.length) : file));
};
@@ -33,30 +33,6 @@ export default class AuthRequestModification extends FileModification {
return null;
}
/**
* Check if this modification should be applied based on Unraid version
* Only apply for Unraid versions up to 7.2.0-beta.2.3
*/
async shouldApply(): Promise<ShouldApplyWithReason> {
// Apply for versions up to and including 7.2.0-beta.2.3
const maxVersion = '7.2.0-beta.2.3';
const isCompatibleVersion = await this.isUnraidVersionLessThanOrEqualTo(maxVersion, {
includePrerelease: true,
});
if (!isCompatibleVersion) {
return {
shouldApply: false,
reason: `Auth request modification only applies to Unraid versions up to ${maxVersion}`,
};
}
return {
shouldApply: true,
reason: `Auth request modification needed for Unraid version <= ${maxVersion}`,
};
}
/**
* Generate a patch for the auth-request.php file
* @param overridePath - The path to override the default file path
@@ -64,12 +40,10 @@ export default class AuthRequestModification extends FileModification {
*/
protected async generatePatch(overridePath?: string): Promise<string> {
const { getters } = await import('@app/store/index.js');
const assetFiles = await this.getAssetFiles(this.webComponentsDirectory);
this.logger.debug(
`Found ${assetFiles.length} asset files (.js and .css) in ${this.webComponentsDirectory}`
);
const jsFiles = await this.getJsFiles(this.webComponentsDirectory);
this.logger.debug(`Found ${jsFiles.length} .js files in ${this.webComponentsDirectory}`);
const filesToAdd = [getters.paths().webgui.logo.assetPath, ...assetFiles];
const filesToAdd = [getters.paths().webgui.logo.assetPath, ...jsFiles];
if (!(await fileExists(this.filePath))) {
throw new Error(`File ${this.filePath} not found.`);

View File

@@ -1,37 +0,0 @@
import { readFile } from 'node:fs/promises';
import { FileModification } from '@app/unraid-api/unraid-file-modifier/file-modification.js';
export default class DisplaySettingsModification extends FileModification {
id: string = 'display-settings';
public readonly filePath: string = '/usr/local/emhttp/plugins/dynamix/DisplaySettings.page';
private removeFixedClassFromLanguageSelect(source: string): string {
// Find lines with locale select and remove class="fixed" from them
return source
.split('\n')
.map((line) => {
// Check if this line contains the locale select element
if (line.includes('<select name="locale"') && line.includes('class="fixed"')) {
// Remove class="fixed" from the line, handling potential spacing variations
return line.replace(/\s*class="fixed"\s*/, ' ').replace(/\s+/g, ' ');
}
return line;
})
.join('\n');
}
private applyToSource(fileContent: string): string {
const transformers = [this.removeFixedClassFromLanguageSelect.bind(this)];
return transformers.reduce((content, transformer) => transformer(content), fileContent);
}
protected async generatePatch(overridePath?: string): Promise<string> {
const fileContent = await readFile(this.filePath, 'utf-8');
const newContent = await this.applyToSource(fileContent);
return this.createPatchWithDiff(overridePath ?? this.filePath, fileContent, newContent);
}
}

View File

@@ -1,17 +0,0 @@
Index: /usr/local/emhttp/plugins/dynamix/DisplaySettings.page
===================================================================
--- /usr/local/emhttp/plugins/dynamix/DisplaySettings.page original
+++ /usr/local/emhttp/plugins/dynamix/DisplaySettings.page modified
@@ -134,11 +134,11 @@
</select>
:display_width_help:
_(Language)_:
-: <select name="locale" class="fixed" onchange="updateDirection(this.value)">
+: <select name="locale" onchange="updateDirection(this.value)">
<?echo mk_option($display['locale'], "","English");
foreach (glob("$plugins/lang-*.xml",GLOB_NOSORT) as $xml_file) {
$lang = language('Language', $xml_file);
$home = language('LanguageLocal', $xml_file);
$name = language('LanguagePack', $xml_file);

View File

@@ -1,28 +0,0 @@
import { ForbiddenException } from '@nestjs/common';
/**
* Checks if a feature flag is enabled and throws an exception if disabled.
* Use this at the beginning of resolver methods for immediate feature flag checks.
*
* @example
* ```typescript
* @ResolveField(() => String)
* async organizer() {
* checkFeatureFlag(FeatureFlags, 'ENABLE_NEXT_DOCKER_RELEASE');
* return this.dockerOrganizerService.resolveOrganizer();
* }
* ```
*
* @param flags - The feature flag object containing boolean/truthy values
* @param key - The key within the feature flag object to check
* @throws ForbiddenException if the feature flag is disabled
*/
export function checkFeatureFlag<T extends Record<string, any>>(flags: T, key: keyof T): void {
const isEnabled = Boolean(flags[key]);
if (!isEnabled) {
throw new ForbiddenException(
`Feature "${String(key)}" is currently disabled. This functionality is not available at this time.`
);
}
}

View File

@@ -1,6 +1,3 @@
import { existsSync, readFileSync } from 'node:fs';
import { basename, join } from 'node:path';
import type { ViteUserConfig } from 'vitest/config';
import { viteCommonjs } from '@originjs/vite-plugin-commonjs';
import nodeResolve from '@rollup/plugin-node-resolve';
@@ -73,29 +70,6 @@ export default defineConfig(({ mode }): ViteUserConfig => {
},
},
}),
// Copy PHP files to assets directory
{
name: 'copy-php-files',
buildStart() {
const phpFiles = ['src/core/utils/plugins/wrapper.php'];
phpFiles.forEach((file) => this.addWatchFile(file));
},
async generateBundle() {
const phpFiles = ['src/core/utils/plugins/wrapper.php'];
phpFiles.forEach((file) => {
if (!existsSync(file)) {
this.warn(`[copy-php-files] PHP file ${file} does not exist`);
return;
}
const content = readFileSync(file);
this.emitFile({
type: 'asset',
fileName: join('assets', basename(file)),
source: content,
});
});
},
},
],
define: {
// Allows vite to preserve process.env variables and not hardcode them

View File

@@ -1,15 +1,14 @@
{
"name": "unraid-monorepo",
"private": true,
"version": "4.21.0",
"version": "4.18.2",
"scripts": {
"build": "pnpm -r build",
"build:watch": "pnpm -r --parallel --filter '!@unraid/ui' build:watch",
"build:watch": " pnpm -r --parallel build:watch",
"codegen": "pnpm -r codegen",
"dev": "pnpm -r dev",
"unraid:deploy": "pnpm -r unraid:deploy",
"test": "pnpm -r test",
"test:watch": "pnpm -r --parallel test:watch",
"lint": "pnpm -r lint",
"lint:fix": "pnpm -r lint:fix",
"type-check": "pnpm -r type-check",

View File

@@ -22,7 +22,7 @@
"@nestjs/graphql": "13.1.0",
"nest-authz": "2.17.0",
"typescript": "5.9.2",
"pify": "6.1.0"
"pify": "^6.1.0"
},
"peerDependencies": {
"@nestjs/common": "11.1.6",

View File

@@ -44,7 +44,7 @@
"graphql-ws": "6.0.6",
"lodash-es": "4.17.21",
"nest-authz": "2.17.0",
"pify": "6.1.0",
"pify": "^6.1.0",
"rimraf": "6.0.1",
"type-fest": "4.41.0",
"typescript": "5.9.2",

View File

@@ -11,19 +11,6 @@ import type { Subscription } from "rxjs";
import { ConfigFileHandler } from "../util/config-file-handler.js";
import { ConfigDefinition } from "../util/config-definition.js";
export type ConfigSubscription = {
/**
* Called when the config changes.
* To prevent race conditions, a config is not provided to the callback.
*/
next?: () => Promise<void>;
/**
* Called when an error occurs within the subscriber.
*/
error?: (error: unknown) => Promise<void>;
};
/**
* Abstract base class for persisting configuration objects to JSON files.
*
@@ -57,7 +44,7 @@ export abstract class ConfigFilePersister<T extends object>
/**
* Creates a new ConfigFilePersister instance.
*
*
* @param configService The NestJS ConfigService instance for reactive config management
*/
constructor(protected readonly configService: ConfigService) {
@@ -79,18 +66,9 @@ export abstract class ConfigFilePersister<T extends object>
*/
abstract configKey(): string;
/**
* Support feature flagging or dynamic toggling of config persistence.
*
* @returns Whether the config is enabled. Defaults to true.
*/
enabled(): boolean {
return true;
}
/**
* Returns a `structuredClone` of the current config object.
*
*
* @param assertExists - Whether to throw an error if the config does not exist. Defaults to true.
* @returns The current config object, or the default config if assertExists is false & no config exists
*/
@@ -112,7 +90,7 @@ export abstract class ConfigFilePersister<T extends object>
/**
* Replaces the current config with a new one. Will trigger a persistence attempt.
*
*
* @param config - The new config object
*/
replaceConfig(config: T) {
@@ -123,7 +101,7 @@ export abstract class ConfigFilePersister<T extends object>
/**
* Returns the absolute path to the configuration file.
* Combines `PATHS_CONFIG_MODULES` environment variable with the filename.
*
*
* @throws Error if `PATHS_CONFIG_MODULES` environment variable is not set
*/
configPath(): string {
@@ -154,33 +132,35 @@ export abstract class ConfigFilePersister<T extends object>
* Loads config from disk and sets up reactive change subscription.
*/
async onModuleInit() {
if (!this.enabled()) return;
this.logger.verbose(`Config path: ${this.configPath()}`);
await this.loadOrMigrateConfig();
this.configObserver = this.subscribe({
next: async () => {
await this.persist();
},
error: async (err) => {
this.logger.error(err, "Error receiving config changes");
},
});
this.configObserver = this.configService.changes$
.pipe(bufferTime(25))
.subscribe({
next: async (changes) => {
const configChanged = changes.some(({ path }) =>
path?.startsWith(this.configKey())
);
if (configChanged) {
await this.persist();
}
},
error: (err) => {
this.logger.error("Error receiving config changes:", err);
},
});
}
/**
* Persists configuration to disk with change detection optimization.
*
*
* @param config - The config object to persist (defaults to current config from service)
* @returns `true` if persisted to disk, `false` if skipped or failed
*/
async persist(
config = this.configService.get(this.configKey())
): Promise<boolean> {
if (!this.enabled()) {
this.logger.verbose(`Config is disabled, skipping persistence`);
return false;
}
if (!config) {
this.logger.warn(`Cannot persist undefined config`);
return false;
@@ -188,38 +168,10 @@ export abstract class ConfigFilePersister<T extends object>
return await this.fileHandler.writeConfigFile(config);
}
/**
* Subscribe to config changes. Changes are buffered for 25ms to prevent race conditions.
*
* When enabled() returns false, the `next` callback will not be called.
*
* @param subscription - The subscription to add
* @returns rxjs Subscription
*/
subscribe(subscription: ConfigSubscription) {
return this.configService.changes$.pipe(bufferTime(25)).subscribe({
next: async (changes) => {
if (!subscription.next) return;
const configChanged = changes.some(({ path }) =>
path?.startsWith(this.configKey())
);
if (configChanged && this.enabled()) {
await subscription.next();
}
},
error: async (err) => {
if (subscription.error) {
await subscription.error(err);
}
},
});
}
/**
* Load or migrate configuration and set it in ConfigService.
*/
private async loadOrMigrateConfig() {
if (!this.enabled()) return;
const config = await this.fileHandler.loadConfig();
this.configService.set(this.configKey(), config);
return this.persist(config);

View File

@@ -1,295 +0,0 @@
import { describe, it, expect, vi } from 'vitest';
import { AsyncMutex } from '../processing.js';
describe('AsyncMutex', () => {
describe('constructor-based operation', () => {
it('should execute the default operation when do() is called without parameters', async () => {
const mockOperation = vi.fn().mockResolvedValue('result');
const mutex = new AsyncMutex(mockOperation);
const result = await mutex.do();
expect(result).toBe('result');
expect(mockOperation).toHaveBeenCalledTimes(1);
});
it('should return the same promise when multiple calls are made concurrently', async () => {
let resolveOperation: (value: string) => void;
const operationPromise = new Promise<string>((resolve) => {
resolveOperation = resolve;
});
const mockOperation = vi.fn().mockReturnValue(operationPromise);
const mutex = new AsyncMutex(mockOperation);
const promise1 = mutex.do();
const promise2 = mutex.do();
const promise3 = mutex.do();
expect(mockOperation).toHaveBeenCalledTimes(1);
expect(promise1).toBe(promise2);
expect(promise2).toBe(promise3);
resolveOperation!('result');
const [result1, result2, result3] = await Promise.all([promise1, promise2, promise3]);
expect(result1).toBe('result');
expect(result2).toBe('result');
expect(result3).toBe('result');
});
it('should allow new operations after the first completes', async () => {
const mockOperation = vi.fn()
.mockResolvedValueOnce('first')
.mockResolvedValueOnce('second');
const mutex = new AsyncMutex(mockOperation);
const result1 = await mutex.do();
expect(result1).toBe('first');
expect(mockOperation).toHaveBeenCalledTimes(1);
const result2 = await mutex.do();
expect(result2).toBe('second');
expect(mockOperation).toHaveBeenCalledTimes(2);
});
it('should handle errors in the default operation', async () => {
const error = new Error('Operation failed');
const mockOperation = vi.fn().mockRejectedValue(error);
const mutex = new AsyncMutex(mockOperation);
await expect(mutex.do()).rejects.toThrow(error);
expect(mockOperation).toHaveBeenCalledTimes(1);
const secondOperation = vi.fn().mockResolvedValue('success');
const mutex2 = new AsyncMutex(secondOperation);
const result = await mutex2.do();
expect(result).toBe('success');
});
});
describe('per-call operation', () => {
it('should execute the provided operation', async () => {
const mutex = new AsyncMutex<number>();
const mockOperation = vi.fn().mockResolvedValue(42);
const result = await mutex.do(mockOperation);
expect(result).toBe(42);
expect(mockOperation).toHaveBeenCalledTimes(1);
});
it('should return the same promise for concurrent calls with same operation type', async () => {
const mutex = new AsyncMutex();
let resolveOperation: (value: string) => void;
const operationPromise = new Promise<string>((resolve) => {
resolveOperation = resolve;
});
const mockOperation = vi.fn().mockReturnValue(operationPromise);
const promise1 = mutex.do(mockOperation);
const promise2 = mutex.do(mockOperation);
const promise3 = mutex.do(mockOperation);
expect(mockOperation).toHaveBeenCalledTimes(1);
expect(promise1).toBe(promise2);
expect(promise2).toBe(promise3);
resolveOperation!('shared-result');
const [result1, result2, result3] = await Promise.all([promise1, promise2, promise3]);
expect(result1).toBe('shared-result');
expect(result2).toBe('shared-result');
expect(result3).toBe('shared-result');
});
it('should allow different operations with different types', async () => {
const mutex = new AsyncMutex();
const stringOp = vi.fn().mockResolvedValue('string-result');
const numberOp = vi.fn().mockResolvedValue(123);
const stringResult = await mutex.do(stringOp);
const numberResult = await mutex.do(numberOp);
expect(stringResult).toBe('string-result');
expect(numberResult).toBe(123);
expect(stringOp).toHaveBeenCalledTimes(1);
expect(numberOp).toHaveBeenCalledTimes(1);
});
it('should handle errors in per-call operations', async () => {
const mutex = new AsyncMutex();
const error = new Error('Operation failed');
const failingOp = vi.fn().mockRejectedValue(error);
await expect(mutex.do(failingOp)).rejects.toThrow(error);
expect(failingOp).toHaveBeenCalledTimes(1);
const successOp = vi.fn().mockResolvedValue('success');
const result = await mutex.do(successOp);
expect(result).toBe('success');
expect(successOp).toHaveBeenCalledTimes(1);
});
it('should throw an error when no operation is provided and no default is set', async () => {
const mutex = new AsyncMutex();
await expect(mutex.do()).rejects.toThrow('No operation provided and no default operation set');
});
});
describe('mixed usage', () => {
it('should allow overriding default operation with per-call operation', async () => {
const defaultOp = vi.fn().mockResolvedValue('default');
const mutex = new AsyncMutex(defaultOp);
const customOp = vi.fn().mockResolvedValue('custom');
const customResult = await mutex.do(customOp);
expect(customResult).toBe('custom');
expect(customOp).toHaveBeenCalledTimes(1);
expect(defaultOp).not.toHaveBeenCalled();
const defaultResult = await mutex.do();
expect(defaultResult).toBe('default');
expect(defaultOp).toHaveBeenCalledTimes(1);
});
it('should share lock between default and custom operations', async () => {
let resolveDefault: (value: string) => void;
const defaultPromise = new Promise<string>((resolve) => {
resolveDefault = resolve;
});
const defaultOp = vi.fn().mockReturnValue(defaultPromise);
const mutex = new AsyncMutex(defaultOp);
const customOp = vi.fn().mockResolvedValue('custom');
const defaultCall = mutex.do();
const customCall = mutex.do(customOp);
expect(defaultOp).toHaveBeenCalledTimes(1);
expect(customOp).not.toHaveBeenCalled();
expect(customCall).toBe(defaultCall);
resolveDefault!('default');
const [defaultResult, customResult] = await Promise.all([defaultCall, customCall]);
expect(defaultResult).toBe('default');
expect(customResult).toBe('default');
});
});
describe('timing and concurrency', () => {
it('should handle sequential slow operations', async () => {
const mutex = new AsyncMutex();
let callCount = 0;
const slowOp = vi.fn().mockImplementation(() => {
return new Promise((resolve) => {
const currentCall = ++callCount;
setTimeout(() => resolve(`result-${currentCall}`), 100);
});
});
const result1 = await mutex.do(slowOp);
expect(result1).toBe('result-1');
const result2 = await mutex.do(slowOp);
expect(result2).toBe('result-2');
expect(slowOp).toHaveBeenCalledTimes(2);
});
it('should deduplicate concurrent slow operations', async () => {
const mutex = new AsyncMutex();
let resolveOperation: (value: string) => void;
const slowOp = vi.fn().mockImplementation(() => {
return new Promise<string>((resolve) => {
resolveOperation = resolve;
});
});
const promises = [
mutex.do(slowOp),
mutex.do(slowOp),
mutex.do(slowOp),
mutex.do(slowOp),
mutex.do(slowOp)
];
expect(slowOp).toHaveBeenCalledTimes(1);
resolveOperation!('shared-slow-result');
const results = await Promise.all(promises);
expect(results).toEqual([
'shared-slow-result',
'shared-slow-result',
'shared-slow-result',
'shared-slow-result',
'shared-slow-result'
]);
});
it('should properly clean up after operation completes', async () => {
const mutex = new AsyncMutex();
const op1 = vi.fn().mockResolvedValue('first');
const op2 = vi.fn().mockResolvedValue('second');
await mutex.do(op1);
expect(op1).toHaveBeenCalledTimes(1);
await mutex.do(op2);
expect(op2).toHaveBeenCalledTimes(1);
});
it('should handle multiple rapid sequences of operations', async () => {
const mutex = new AsyncMutex();
const results: string[] = [];
for (let i = 0; i < 5; i++) {
const op = vi.fn().mockResolvedValue(`result-${i}`);
const result = await mutex.do(op);
results.push(result as string);
}
expect(results).toEqual(['result-0', 'result-1', 'result-2', 'result-3', 'result-4']);
});
});
describe('edge cases', () => {
it('should handle operations that return undefined', async () => {
const mutex = new AsyncMutex<undefined>();
const op = vi.fn().mockResolvedValue(undefined);
const result = await mutex.do(op);
expect(result).toBeUndefined();
expect(op).toHaveBeenCalledTimes(1);
});
it('should handle operations that return null', async () => {
const mutex = new AsyncMutex<null>();
const op = vi.fn().mockResolvedValue(null);
const result = await mutex.do(op);
expect(result).toBeNull();
expect(op).toHaveBeenCalledTimes(1);
});
it('should handle nested operations correctly', async () => {
const mutex = new AsyncMutex<string>();
const innerOp = vi.fn().mockResolvedValue('inner');
const outerOp = vi.fn().mockImplementation(async () => {
return 'outer';
});
const result = await mutex.do(outerOp);
expect(result).toBe('outer');
expect(outerOp).toHaveBeenCalledTimes(1);
});
});
});

View File

@@ -31,119 +31,3 @@ export function makeSafeRunner(onError: (error: unknown) => void) {
}
};
}
type AsyncOperation<T> = () => Promise<T>;
/**
* A mutex for asynchronous operations that ensures only one operation runs at a time.
*
* When multiple callers attempt to execute operations simultaneously, they will all
* receive the same promise from the currently running operation, effectively deduplicating
* concurrent calls. This is useful for expensive operations like API calls, file operations,
* or database queries that should not be executed multiple times concurrently.
*
* @template T - The default return type for operations when using a default operation
*
* @example
* // Basic usage with explicit operations
* const mutex = new AsyncMutex();
*
* // Multiple concurrent calls will deduplicate
* const [result1, result2, result3] = await Promise.all([
* mutex.do(() => fetch('/api/data')),
* mutex.do(() => fetch('/api/data')), // Same request, will get same promise
* mutex.do(() => fetch('/api/data')) // Same request, will get same promise
* ]);
* // Only one fetch actually happens
*
* @example
* // Usage with a default operation
* const dataLoader = new AsyncMutex(() =>
* fetch('/api/expensive-data').then(res => res.json())
* );
*
* const data1 = await dataLoader.do(); // Executes the fetch
* const data2 = await dataLoader.do(); // If first promise is finished, a new fetch is executed
*/
export class AsyncMutex<T = unknown> {
private currentOperation: Promise<T> | null = null;
private defaultOperation?: AsyncOperation<T>;
/**
* Creates a new AsyncMutex instance.
*
* @param operation - Optional default operation to execute when calling `do()` without arguments.
* This is useful when you have a specific operation that should be deduplicated.
*
* @example
* // Without default operation (shared mutex)
* const mutex = new AsyncMutex();
* const promise1 = mutex.do(() => someAsyncWork());
* const promise2 = mutex.do(() => someOtherAsyncWork());
*
* // Both promises will be the same
* expect(await promise1).toBe(await promise2);
*
* // After the first operation completes, new operations can run
* await promise1;
* const newPromise = mutex.do(() => someOtherAsyncWork()); // This will execute
*
* @example
* // With default operation (deduplicating a specific operation)
* const dataMutex = new AsyncMutex(() => loadExpensiveData());
* await dataMutex.do(); // Executes loadExpensiveData()
*/
constructor(operation?: AsyncOperation<T>) {
this.defaultOperation = operation;
}
/**
* Executes the provided operation, ensuring only one runs at a time.
*
* If an operation is already running, all subsequent calls will receive
* the same promise from the currently running operation. This effectively
* deduplicates concurrent calls to the same expensive operation.
*
* @param operation - Optional operation to execute. If not provided, uses the default operation.
* @returns Promise that resolves with the result of the operation
* @throws Error if no operation is provided and no default operation was set
*
* @example
* const mutex = new AsyncMutex();
*
* // These will all return the same promise
* const promise1 = mutex.do(() => fetch('/api/data'));
* const promise2 = mutex.do(() => fetch('/api/other')); // Still gets first promise!
* const promise3 = mutex.do(() => fetch('/api/another')); // Still gets first promise!
*
* // After the first operation completes, new operations can run
* await promise1;
* const newPromise = mutex.do(() => fetch('/api/new')); // This will execute
*/
do(operation?: AsyncOperation<T>): Promise<T> {
if (this.currentOperation) {
return this.currentOperation;
}
const op = operation ?? this.defaultOperation;
if (!op) {
return Promise.reject(
new Error("No operation provided and no default operation set")
);
}
const safeOp = () => {
try {
return op();
} catch (error) {
return Promise.reject(error);
}
};
const promise = safeOp().finally(() => {
if (this.currentOperation === promise) {
this.currentOperation = null;
}
});
this.currentOperation = promise;
return promise;
}
}
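
For readers skimming this hunk, here is a minimal sketch of how the AsyncMutex shown above could deduplicate an expensive loader. It assumes only the class defined in this hunk; the Container type, loadDockerContainers, and refreshAll names are hypothetical illustrations, not code from this repository.

type Container = { id: string; name: string };

// Hypothetical stand-in for an expensive API call.
async function loadDockerContainers(): Promise<Container[]> {
  return [{ id: 'abc123', name: 'nginx' }];
}

// Default-operation form: every do() call runs (or joins) loadDockerContainers.
const containerLoader = new AsyncMutex<Container[]>(loadDockerContainers);

async function refreshAll(): Promise<void> {
  // The three calls below share one in-flight promise, so the loader runs once.
  const [a, b, c] = await Promise.all([
    containerLoader.do(),
    containerLoader.do(),
    containerLoader.do(),
  ]);
  console.log(a === b && b === c); // true: all callers receive the same resolved value
}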

View File

@@ -10,34 +10,34 @@ import { cleanupTxzFiles } from "./utils/cleanup";
import { apiDir } from "./utils/paths";
import { getVendorBundleName, getVendorFullPath } from "./build-vendor-store";
import { getAssetUrl } from "./utils/bucket-urls";
import { validateStandaloneManifest, getStandaloneManifestPath } from "./utils/manifest-validator";
// Check for manifest files in expected locations
// Recursively search for manifest files
const findManifestFiles = async (dir: string): Promise<string[]> => {
const files: string[] = [];
// Check standalone subdirectory (preferred)
try {
const standaloneDir = join(dir, "standalone");
const entries = await readdir(standaloneDir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile() && entry.name === "standalone.manifest.json") {
files.push("standalone/standalone.manifest.json");
}
}
} catch (error) {
// Directory doesn't exist, continue checking other locations
}
// Check root directory for backwards compatibility
try {
const entries = await readdir(dir, { withFileTypes: true });
const files: string[] = [];
for (const entry of entries) {
if (entry.isFile() && entry.name === "standalone.manifest.json") {
files.push("standalone.manifest.json");
const fullPath = join(dir, entry.name);
if (entry.isDirectory()) {
try {
files.push(...(await findManifestFiles(fullPath)));
} catch (error) {
// Log and continue if a subdirectory can't be read
console.warn(`Warning: Could not read directory ${fullPath}: ${error.message}`);
}
} else if (
entry.isFile() &&
(entry.name === "manifest.json" ||
entry.name === "ui.manifest.json" ||
entry.name === "standalone.manifest.json")
) {
files.push(entry.name);
}
}
return files;
} catch (error) {
if (error.code === 'ENOENT') {
console.warn(`Directory does not exist: ${dir}`);
@@ -45,8 +45,6 @@ const findManifestFiles = async (dir: string): Promise<string[]> => {
}
throw error; // Re-throw other errors
}
return files;
};
// Function to store vendor archive information in a recoverable location
@@ -127,41 +125,24 @@ const validateSourceDir = async (validatedEnv: TxzEnv) => {
}
const manifestFiles = await findManifestFiles(webcomponentDir);
const hasStandaloneManifest = manifestFiles.some(file =>
file === "standalone.manifest.json" || file === "standalone/standalone.manifest.json"
);
const hasManifest = manifestFiles.includes("manifest.json");
const hasStandaloneManifest = manifestFiles.includes("standalone.manifest.json");
const hasUiManifest = manifestFiles.includes("ui.manifest.json");
// Only require standalone.manifest.json for new standalone apps
if (!hasStandaloneManifest) {
// Accept either manifest.json (old web components) or standalone.manifest.json (new standalone apps)
if ((!hasManifest && !hasStandaloneManifest) || !hasUiManifest) {
console.log("Existing Manifest Files:", manifestFiles);
const missingFiles: string[] = [];
if (!hasManifest && !hasStandaloneManifest) missingFiles.push("manifest.json or standalone.manifest.json");
if (!hasUiManifest) missingFiles.push("ui.manifest.json");
throw new Error(
`Webcomponents missing required file: standalone.manifest.json - ` +
`run 'pnpm build' in web to generate standalone.manifest.json in the standalone/ subdirectory`
`Webcomponents missing required file(s): ${missingFiles.join(", ")} - ` +
`${!hasUiManifest ? "run 'pnpm build:wc' in unraid-ui for ui.manifest.json" : ""}` +
`${(!hasManifest && !hasStandaloneManifest) && !hasUiManifest ? " and " : ""}` +
`${(!hasManifest && !hasStandaloneManifest) ? "run 'pnpm build' in web for standalone.manifest.json" : ""}`
);
}
// Validate the manifest contents
const manifestPath = getStandaloneManifestPath(webcomponentDir);
if (manifestPath) {
const validation = await validateStandaloneManifest(manifestPath);
if (!validation.isValid) {
console.error("Standalone manifest validation failed:");
validation.errors.forEach(error => console.error(`${error}`));
if (validation.warnings.length > 0) {
console.warn("Warnings:");
validation.warnings.forEach(warning => console.warn(` ⚠️ ${warning}`));
}
throw new Error("Standalone manifest validation failed. See errors above.");
}
if (validation.warnings.length > 0) {
console.warn("Standalone manifest validation warnings:");
validation.warnings.forEach(warning => console.warn(` ⚠️ ${warning}`));
}
console.log("✅ Standalone manifest validation passed");
}
if (!existsSync(apiDir)) {
throw new Error(`API directory ${apiDir} does not exist`);

View File

@@ -1,290 +0,0 @@
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { mkdir, writeFile, rm } from "fs/promises";
import { join } from "path";
import { tmpdir } from "os";
import {
validateStandaloneManifest,
getStandaloneManifestPath,
type StandaloneManifest
} from "./manifest-validator";
describe("manifest-validator", () => {
let testDir: string;
let manifestPath: string;
beforeEach(async () => {
// Create a temporary test directory
testDir = join(tmpdir(), `manifest-test-${Date.now()}`);
await mkdir(testDir, { recursive: true });
manifestPath = join(testDir, "standalone.manifest.json");
});
afterEach(async () => {
// Clean up test directory
await rm(testDir, { recursive: true, force: true });
});
describe("validateStandaloneManifest", () => {
it("should fail when manifest file does not exist", async () => {
const result = await validateStandaloneManifest(join(testDir, "nonexistent.json"));
expect(result.isValid).toBe(false);
expect(result.errors).toHaveLength(1);
expect(result.errors[0]).toContain("Manifest file does not exist");
});
it("should fail when manifest has invalid JSON", async () => {
await writeFile(manifestPath, "{ invalid json");
const result = await validateStandaloneManifest(manifestPath);
expect(result.isValid).toBe(false);
expect(result.errors).toHaveLength(1);
expect(result.errors[0]).toContain("Failed to parse manifest JSON");
});
it("should pass for valid manifest with existing files", async () => {
// Create the referenced files
await writeFile(join(testDir, "app.js"), "console.log('app');");
await writeFile(join(testDir, "app.css"), "body { color: red; }");
// Create valid manifest
const manifest: StandaloneManifest = {
"app.js": {
file: "app.js",
src: "app.js",
isEntry: true,
},
"app.css": {
file: "app.css",
src: "app.css",
},
ts: Date.now(),
};
await writeFile(manifestPath, JSON.stringify(manifest, null, 2));
const result = await validateStandaloneManifest(manifestPath);
expect(result.isValid).toBe(true);
expect(result.errors).toHaveLength(0);
expect(result.warnings).toHaveLength(0);
});
it("should fail when referenced files are missing", async () => {
const manifest: StandaloneManifest = {
"app.js": {
file: "app.js",
src: "app.js",
},
"app.css": {
file: "app.css",
src: "app.css",
},
};
await writeFile(manifestPath, JSON.stringify(manifest, null, 2));
const result = await validateStandaloneManifest(manifestPath);
expect(result.isValid).toBe(false);
expect(result.errors).toHaveLength(2);
expect(result.errors).toContain("Missing file referenced in manifest: app.js");
expect(result.errors).toContain("Missing file referenced in manifest: app.css");
});
it("should fail when CSS files in array are missing", async () => {
await writeFile(join(testDir, "app.js"), "console.log('app');");
const manifest: StandaloneManifest = {
"app.js": {
file: "app.js",
css: ["style1.css", "style2.css"],
},
};
await writeFile(manifestPath, JSON.stringify(manifest, null, 2));
const result = await validateStandaloneManifest(manifestPath);
expect(result.isValid).toBe(false);
expect(result.errors).toHaveLength(2);
expect(result.errors).toContain("Missing CSS file referenced in manifest: style1.css");
expect(result.errors).toContain("Missing CSS file referenced in manifest: style2.css");
});
it("should fail when asset files are missing", async () => {
await writeFile(join(testDir, "app.js"), "console.log('app');");
const manifest: StandaloneManifest = {
"app.js": {
file: "app.js",
assets: ["image.png", "font.woff2"],
},
};
await writeFile(manifestPath, JSON.stringify(manifest, null, 2));
const result = await validateStandaloneManifest(manifestPath);
expect(result.isValid).toBe(false);
expect(result.errors).toHaveLength(2);
expect(result.errors).toContain("Missing asset file referenced in manifest: image.png");
expect(result.errors).toContain("Missing asset file referenced in manifest: font.woff2");
});
it("should warn for missing imports but not fail", async () => {
await writeFile(join(testDir, "app.js"), "console.log('app');");
const manifest: StandaloneManifest = {
"app.js": {
file: "app.js",
imports: ["virtual-module"],
},
};
await writeFile(manifestPath, JSON.stringify(manifest, null, 2));
const result = await validateStandaloneManifest(manifestPath);
expect(result.isValid).toBe(true);
expect(result.errors).toHaveLength(0);
expect(result.warnings).toHaveLength(1);
expect(result.warnings[0]).toContain("Missing import file referenced in manifest: virtual-module");
});
it("should skip timestamp field", async () => {
await writeFile(join(testDir, "app.js"), "console.log('app');");
const manifest = {
"app.js": {
file: "app.js",
},
ts: 1234567890,
};
await writeFile(manifestPath, JSON.stringify(manifest, null, 2));
const result = await validateStandaloneManifest(manifestPath);
expect(result.isValid).toBe(true);
expect(result.errors).toHaveLength(0);
});
it("should warn for non-entry fields", async () => {
await writeFile(join(testDir, "app.js"), "console.log('app');");
const manifest = {
"app.js": {
file: "app.js",
},
"invalid": "not an entry",
};
await writeFile(manifestPath, JSON.stringify(manifest, null, 2));
const result = await validateStandaloneManifest(manifestPath);
expect(result.isValid).toBe(true);
expect(result.warnings).toHaveLength(1);
expect(result.warnings[0]).toContain("Skipping non-entry field: invalid");
});
it("should fail when no JavaScript entry exists", async () => {
await writeFile(join(testDir, "app.css"), "body { color: red; }");
const manifest: StandaloneManifest = {
"app.css": {
file: "app.css",
},
};
await writeFile(manifestPath, JSON.stringify(manifest, null, 2));
const result = await validateStandaloneManifest(manifestPath);
expect(result.isValid).toBe(false);
expect(result.errors).toHaveLength(1);
expect(result.errors[0]).toContain("Manifest must contain at least one JavaScript entry file");
});
it("should not check duplicate files multiple times", async () => {
await writeFile(join(testDir, "app.js"), "console.log('app');");
await writeFile(join(testDir, "shared.css"), "body { color: red; }");
const manifest: StandaloneManifest = {
"entry1": {
file: "app.js",
css: ["shared.css"],
},
"entry2": {
file: "app.js",
css: ["shared.css"],
},
};
await writeFile(manifestPath, JSON.stringify(manifest, null, 2));
const result = await validateStandaloneManifest(manifestPath);
expect(result.isValid).toBe(true);
expect(result.errors).toHaveLength(0);
});
});
describe("getStandaloneManifestPath", () => {
it("should find manifest in standalone subdirectory (preferred)", async () => {
const standaloneDir = join(testDir, "standalone");
await mkdir(standaloneDir, { recursive: true });
const standaloneManifestPath = join(standaloneDir, "standalone.manifest.json");
await writeFile(standaloneManifestPath, "{}");
const path = getStandaloneManifestPath(testDir);
expect(path).toBe(standaloneManifestPath);
});
it("should find manifest in root directory", async () => {
await writeFile(manifestPath, "{}");
const path = getStandaloneManifestPath(testDir);
expect(path).toBe(manifestPath);
});
it("should find manifest in nuxt subdirectory for backwards compatibility", async () => {
const nuxtDir = join(testDir, "nuxt");
await mkdir(nuxtDir, { recursive: true });
const nuxtManifestPath = join(nuxtDir, "standalone.manifest.json");
await writeFile(nuxtManifestPath, "{}");
const path = getStandaloneManifestPath(testDir);
expect(path).toBe(nuxtManifestPath);
});
it("should prefer standalone subdirectory over root and nuxt", async () => {
// Create manifest in all locations
const standaloneDir = join(testDir, "standalone");
await mkdir(standaloneDir, { recursive: true });
const standaloneManifestPath = join(standaloneDir, "standalone.manifest.json");
await writeFile(standaloneManifestPath, "{}");
await writeFile(manifestPath, "{}");
const nuxtDir = join(testDir, "nuxt");
await mkdir(nuxtDir, { recursive: true });
await writeFile(join(nuxtDir, "standalone.manifest.json"), "{}");
const path = getStandaloneManifestPath(testDir);
expect(path).toBe(standaloneManifestPath);
});
it("should return null when no manifest exists", async () => {
const path = getStandaloneManifestPath(testDir);
expect(path).toBeNull();
});
});
});

View File

@@ -1,173 +0,0 @@
import { existsSync } from "fs";
import { readFile } from "fs/promises";
import { join, dirname } from "path";
export interface ManifestEntry {
file: string;
src?: string;
css?: string[];
assets?: string[];
imports?: string[];
dynamicImports?: string[];
isDynamicEntry?: boolean;
isEntry?: boolean;
}
export interface StandaloneManifest {
[key: string]: ManifestEntry | number;
}
export interface ValidationResult {
isValid: boolean;
errors: string[];
warnings: string[];
manifest?: StandaloneManifest;
}
/**
* Validates a standalone.manifest.json file and checks that all referenced files exist
* @param manifestPath - Path to the manifest file
* @returns Validation result with errors and warnings
*/
export async function validateStandaloneManifest(manifestPath: string): Promise<ValidationResult> {
const errors: string[] = [];
const warnings: string[] = [];
// Check if manifest file exists
if (!existsSync(manifestPath)) {
return {
isValid: false,
errors: [`Manifest file does not exist: ${manifestPath}`],
warnings,
};
}
let manifest: StandaloneManifest;
try {
const content = await readFile(manifestPath, "utf-8");
manifest = JSON.parse(content);
} catch (error) {
return {
isValid: false,
errors: [`Failed to parse manifest JSON: ${error.message}`],
warnings,
};
}
// Get the directory containing the manifest
// Files should be relative to the manifest location
const manifestDir = dirname(manifestPath);
// Track which files were checked to avoid duplicates
const checkedFiles = new Set<string>();
// Validate each entry in the manifest
for (const [key, value] of Object.entries(manifest)) {
// Skip the timestamp field
if (key === "ts" && typeof value === "number") {
continue;
}
// Skip if not a manifest entry
if (typeof value !== "object" || !value || !("file" in value)) {
warnings.push(`Skipping non-entry field: ${key}`);
continue;
}
const entry = value as ManifestEntry;
// Check main file
if (entry.file) {
const filePath = join(manifestDir, entry.file);
if (!checkedFiles.has(filePath)) {
checkedFiles.add(filePath);
if (!existsSync(filePath)) {
errors.push(`Missing file referenced in manifest: ${entry.file}`);
}
}
}
// Check CSS files
if (entry.css && Array.isArray(entry.css)) {
for (const cssFile of entry.css) {
const cssPath = join(manifestDir, cssFile);
if (!checkedFiles.has(cssPath)) {
checkedFiles.add(cssPath);
if (!existsSync(cssPath)) {
errors.push(`Missing CSS file referenced in manifest: ${cssFile}`);
}
}
}
}
// Check asset files
if (entry.assets && Array.isArray(entry.assets)) {
for (const assetFile of entry.assets) {
const assetPath = join(manifestDir, assetFile);
if (!checkedFiles.has(assetPath)) {
checkedFiles.add(assetPath);
if (!existsSync(assetPath)) {
errors.push(`Missing asset file referenced in manifest: ${assetFile}`);
}
}
}
}
// Check imports
if (entry.imports && Array.isArray(entry.imports)) {
for (const importFile of entry.imports) {
const importPath = join(manifestDir, importFile);
if (!checkedFiles.has(importPath)) {
checkedFiles.add(importPath);
if (!existsSync(importPath)) {
warnings.push(`Missing import file referenced in manifest: ${importFile} (this may be okay if it's a virtual import)`);
}
}
}
}
}
// Check for required entries
const hasJsEntry = Object.values(manifest).some(
(entry) => typeof entry === "object" && entry?.file?.endsWith(".js")
);
if (!hasJsEntry) {
errors.push("Manifest must contain at least one JavaScript entry file");
}
return {
isValid: errors.length === 0,
errors,
warnings,
manifest,
};
}
/**
* Gets the path to the standalone manifest file in a directory
* @param dir - Directory to search in
* @returns Path to the manifest file or null if not found
*/
export function getStandaloneManifestPath(dir: string): string | null {
// Check standalone subdirectory first (preferred location)
const standaloneManifest = join(dir, "standalone", "standalone.manifest.json");
if (existsSync(standaloneManifest)) {
return standaloneManifest;
}
// Check root directory for backwards compatibility
const rootManifest = join(dir, "standalone.manifest.json");
if (existsSync(rootManifest)) {
return rootManifest;
}
// Check nuxt subdirectory for backwards compatibility
const nuxtManifest = join(dir, "nuxt", "standalone.manifest.json");
if (existsSync(nuxtManifest)) {
return nuxtManifest;
}
return null;
}
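
As a quick orientation to the two exports above, here is a minimal sketch of how a build step could chain getStandaloneManifestPath and validateStandaloneManifest, in the same spirit as the validateSourceDir hunk earlier in this diff. The checkManifest name and webcomponentDir parameter are hypothetical, not part of the repository.

import { getStandaloneManifestPath, validateStandaloneManifest } from "./manifest-validator";

async function checkManifest(webcomponentDir: string): Promise<void> {
  // Resolve the preferred manifest location (standalone/, then root, then nuxt/).
  const manifestPath = getStandaloneManifestPath(webcomponentDir);
  if (!manifestPath) {
    throw new Error(`No standalone.manifest.json found under ${webcomponentDir}`);
  }
  const result = await validateStandaloneManifest(manifestPath);
  // Warnings (e.g. missing imports) are reported but do not fail the build.
  result.warnings.forEach((warning) => console.warn(`⚠️ ${warning}`));
  if (!result.isValid) {
    result.errors.forEach((error) => console.error(error));
    throw new Error("Standalone manifest validation failed. See errors above.");
  }
}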

View File

@@ -12,7 +12,7 @@ services:
- ./source:/app/source
- ./scripts:/app/scripts
- ../unraid-ui/dist-wc:/app/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/uui
- ../web/dist:/app/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/standalone
- ../web/.nuxt/nuxt-custom-elements/dist/unraid-components:/app/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/nuxt
- ../api/deploy/release/:/app/source/dynamix.unraid.net/usr/local/unraid-api # Use the release dir instead of pack to allow watcher to not try to build with node_modules
stdin_open: true # equivalent to -i
tty: true # equivalent to -t

View File

@@ -1,6 +1,6 @@
{
"name": "@unraid/connect-plugin",
"version": "4.21.0",
"version": "4.18.2",
"private": true,
"dependencies": {
"commander": "14.0.0",

View File

@@ -14,9 +14,9 @@
<!ENTITY tag "">
<!ENTITY api_version "">
]>
<!-- Plugin min version is lower than the actual min version of Connect (6.12.0) to facilitate cleanup on newly unsupported versions. -->
<PLUGIN name="&name;" author="&author;" version="&version;" pluginURL="&plugin_url;"
launch="&launch;" min="6.9.0-rc1" icon="globe">
launch="&launch;" min="6.12.15" icon="globe">
<CHANGES>
##a long time ago in a galaxy far far away
@@ -60,10 +60,10 @@ exit 0
// Check Unraid version
$version = @parse_ini_file('/etc/unraid-version', true)['version'];
// Check if this is a recommended version
// - Should be 6.12.15 or higher
// - Should not be a 6.12.15 beta/rc version
$is_stable_6_12_or_higher = version_compare($version, '6.12.15', '>=') && !preg_match('/^6\\.12\\.15-/', $version);
// Check if this is a supported version
// - Must be 6.12.15 or higher
// - Must not be a 6.12.15 beta/rc version
$is_stable_6_12_or_higher = version_compare($version, '6.12.15', '>=') && !preg_match('/^6\\.12\\.0-/', $version);
if ($is_stable_6_12_or_higher) {
echo "Running on supported version {$version}\n";
@@ -71,104 +71,9 @@ if ($is_stable_6_12_or_higher) {
}
echo "Warning: Unsupported Unraid version {$version}. This plugin requires Unraid 6.12.15 or higher.\n";
echo "The plugin may not function correctly on this system. It may stop working entirely in the future.\n";
echo "⚠️ Please uninstall this plugin or upgrade to a newer version of Unraid to enjoy Unraid Connect\n";
echo "The plugin will not function correctly on this system.\n";
// early escape handled via unraid_connect_cleanup_for_unsupported_os_versions step
exit(0);
]]>
</INLINE>
</FILE>
<!-- cleanup script for Unraid 6.12.0 and earlier. when run, it will exit the plugin install with an error status. -->
<!-- See the version of dynamix.unraid.net.plg in commit a240a031a for the original implementation of this code. -->
<FILE Name="/tmp/unraid_connect_cleanup_for_unsupported_os_versions.sh" Run="/bin/bash" Method="install" Max="6.12.0">
<INLINE>
<![CDATA[
# Inlined copy of perform_connect_cleanup from
# /usr/local/share/dynamix.unraid.net/install/scripts/cleanup.sh
# to avoid the need to decompress the plugin tarball just to run the cleanup script
# Handle flash backup deactivation and Connect signout
perform_connect_cleanup() {
printf "\n**********************************\n"
printf "🧹 CLEANING UP - may take a minute\n"
printf "**********************************\n"
# Handle git-based flash backups
if [ -f "/boot/.git" ]; then
if [ -f "/etc/rc.d/rc.flash_backup" ]; then
printf "\nStopping flash backup service. Please wait...\n"
/etc/rc.d/rc.flash_backup stop >/dev/null 2>&1
fi
if [ -f "/usr/local/emhttp/plugins/dynamix.my.servers/include/UpdateFlashBackup.php" ]; then
printf "\nDeactivating flash backup. Please wait...\n"
/usr/bin/php /usr/local/emhttp/plugins/dynamix.my.servers/include/UpdateFlashBackup.php deactivate
fi
fi
# Check if connect.json or myservers.cfg exists
if [ -f "/boot/config/plugins/dynamix.my.servers/configs/connect.json" ] || [ -f "/boot/config/plugins/dynamix.my.servers/myservers.cfg" ]; then
# Stop unraid-api
printf "\nStopping unraid-api. Please wait...\n"
if [ -f "/etc/rc.d/rc.unraid-api" ]; then
/etc/rc.d/rc.unraid-api stop 2>&1
elif command -v unraid-api >/dev/null 2>&1; then
unraid-api stop 2>&1
else
echo "⚠️ Warning: Neither /etc/rc.d/rc.unraid-api nor unraid-api command found"
fi
# Sign out of Unraid Connect (we'll use curl directly from shell)
# We need to extract the username from connect.json or myservers.cfg and the registration key
has_username=false
# Check connect.json first (newer format)
if [ -f "/boot/config/plugins/dynamix.my.servers/configs/connect.json" ] && command -v jq >/dev/null 2>&1; then
username=$(jq -r '.username' "/boot/config/plugins/dynamix.my.servers/configs/connect.json" 2>/dev/null)
if [ -n "$username" ] && [ "$username" != "null" ]; then
has_username=true
fi
fi
# Fallback to myservers.cfg (legacy format)
if [ "$has_username" = false ] && [ -f "/boot/config/plugins/dynamix.my.servers/myservers.cfg" ]; then
if grep -q 'username' "/boot/config/plugins/dynamix.my.servers/myservers.cfg"; then
has_username=true
fi
fi
if [ "$has_username" = true ]; then
printf "\nSigning out of Unraid Connect\n"
# Check if regFILE exists in var.ini
if [ -f "/var/local/emhttp/var.ini" ]; then
regfile=$(grep "regFILE" "/var/local/emhttp/var.ini" | cut -d= -f2)
if [ -n "$regfile" ] && [ -f "$regfile" ]; then
# Base64 encode the key file and send to server
encoded_key=$(base64 "$regfile" | tr -d '\n')
if [ -n "$encoded_key" ]; then
curl -s -X POST "https://keys.lime-technology.com/account/server/unregister" \
-d "keyfile=$encoded_key" >/dev/null 2>&1
fi
fi
fi
fi
# Remove config files
rm -f /boot/config/plugins/dynamix.my.servers/myservers.cfg
rm -f /boot/config/plugins/dynamix.my.servers/configs/connect.json
# Reload nginx to disable Remote Access
printf "\n⚠ Reloading Web Server. If this window stops updating for two minutes please close it.\n"
/etc/rc.d/rc.nginx reload >/dev/null 2>&1
fi
}
perform_connect_cleanup
echo "Done. Please uninstall the Connect plugin to complete the cleanup."
# Exit with error to clarify that further user action--either uninstalling the plugin or upgrading to a newer version of Unraid--is required.
exit 1;
exit(1);
]]>
</INLINE>
</FILE>
@@ -427,14 +332,6 @@ if [ -d "/usr/local/unraid-api/node_modules" ]; then
rm -rf "/usr/local/unraid-api/node_modules"
fi
# Clear existing unraid-components directory contents to ensure clean installation
echo "Cleaning up existing unraid-components directory..."
DIR="/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components"
if [ -d "$DIR" ]; then
echo "Clearing contents of: $DIR"
rm -rf "$DIR"/*
fi
# Install the package using the explicit file path
upgradepkg --install-new --reinstall "${PKG_FILE}"
if [ $? -ne 0 ]; then

View File

@@ -27,10 +27,10 @@ CONTAINER_NAME="plugin-builder"
# Create the directory if it doesn't exist
# This is to prevent errors when mounting volumes in docker compose
WEB_DIST_DIR="../web/dist"
if [ ! -d "$WEB_DIST_DIR" ]; then
echo "Creating directory $WEB_DIST_DIR for Docker volume mount..."
mkdir -p "$WEB_DIST_DIR"
NUXT_COMPONENTS_DIR="../web/.nuxt/nuxt-custom-elements/dist/unraid-components"
if [ ! -d "$NUXT_COMPONENTS_DIR" ]; then
echo "Creating directory $NUXT_COMPONENTS_DIR for Docker volume mount..."
mkdir -p "$NUXT_COMPONENTS_DIR"
fi
# Stop any running plugin-builder container first

View File

@@ -25,9 +25,3 @@ backup_file_if_exists usr/local/unraid-api/.env
cp usr/local/unraid-api/.env.production usr/local/unraid-api/.env
# auto-generated actions from makepkg:
( cd usr/local/bin ; rm -rf corepack )
( cd usr/local/bin ; ln -sf ../lib/node_modules/corepack/dist/corepack.js corepack )
( cd usr/local/bin ; rm -rf npm )
( cd usr/local/bin ; ln -sf ../lib/node_modules/npm/bin/npm-cli.js npm )
( cd usr/local/bin ; rm -rf npx )
( cd usr/local/bin ; ln -sf ../lib/node_modules/npm/bin/npx-cli.js npx )

View File

@@ -0,0 +1,6 @@
Menu="UNRAID-OS"
Title="Detail Layout"
Icon="icon-u-globe"
Tag="globe"
---
<unraid-detail-test />

View File

@@ -1 +0,0 @@
This folder is PUBLIC. Please be aware of this when using it on Unraid OS.

View File

@@ -1,7 +1,5 @@
#!/bin/sh
# Script to handle cleanup operations during removal
# NOTE: an inline copy of this script exists in dynamix.unraid.net.plg for Unraid 6.12.14 and earlier
# When updating this script, be sure to update the inline copy as well.
# Get the operation mode
MODE="${1:-cleanup}"
@@ -25,38 +23,20 @@ perform_connect_cleanup() {
fi
fi
# Check if connect.json or myservers.cfg exists
if [ -f "/boot/config/plugins/dynamix.my.servers/configs/connect.json" ] || [ -f "/boot/config/plugins/dynamix.my.servers/myservers.cfg" ]; then
# Check if myservers.cfg exists
if [ -f "/boot/config/plugins/dynamix.my.servers/myservers.cfg" ]; then
# Stop unraid-api
printf "\nStopping unraid-api. Please wait...\n"
if [ -f "/etc/rc.d/rc.unraid-api" ]; then
/etc/rc.d/rc.unraid-api stop 2>&1
elif command -v unraid-api >/dev/null 2>&1; then
unraid-api stop 2>&1
else
echo "⚠️ Warning: Neither /etc/rc.d/rc.unraid-api nor unraid-api command found"
output=$(/etc/rc.d/rc.unraid-api stop --delete 2>&1)
if [ -z "$output" ]; then
echo "Waiting for unraid-api to stop..."
sleep 5 # Give it time to stop
fi
echo "Stopped unraid-api: $output"
# Sign out of Unraid Connect (we'll use curl directly from shell)
# We need to extract the username from connect.json or myservers.cfg and the registration key
has_username=false
# Check connect.json first (newer format)
if [ -f "/boot/config/plugins/dynamix.my.servers/configs/connect.json" ] && command -v jq >/dev/null 2>&1; then
username=$(jq -r '.username' "/boot/config/plugins/dynamix.my.servers/configs/connect.json" 2>/dev/null)
if [ -n "$username" ] && [ "$username" != "null" ]; then
has_username=true
fi
fi
# Fallback to myservers.cfg (legacy format)
if [ "$has_username" = false ] && [ -f "/boot/config/plugins/dynamix.my.servers/myservers.cfg" ]; then
if grep -q 'username' "/boot/config/plugins/dynamix.my.servers/myservers.cfg"; then
has_username=true
fi
fi
if [ "$has_username" = true ]; then
# We need to extract the username from myservers.cfg and the registration key
if grep -q 'username' "/boot/config/plugins/dynamix.my.servers/myservers.cfg"; then
printf "\nSigning out of Unraid Connect\n"
# Check if regFILE exists in var.ini
if [ -f "/var/local/emhttp/var.ini" ]; then
@@ -72,9 +52,8 @@ perform_connect_cleanup() {
fi
fi
# Remove config files
# Remove myservers.cfg
rm -f /boot/config/plugins/dynamix.my.servers/myservers.cfg
rm -f /boot/config/plugins/dynamix.my.servers/configs/connect.json
# Reload nginx to disable Remote Access
printf "\n⚠ Reloading Web Server. If this window stops updating for two minutes please close it.\n"
@@ -152,4 +131,4 @@ case "$MODE" in
echo "Usage: $0 [cleanup]"
exit 1
;;
esac
esac

Some files were not shown because too many files have changed in this diff.