mirror of
https://github.com/unraid/api.git
synced 2026-01-05 16:09:49 -06:00
Compare commits
61 Commits
4.15.1-bui
...
feat/trans
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0b080501c1 | ||
|
|
a27453fda8 | ||
|
|
98e6058cd8 | ||
|
|
6c2c51ae1d | ||
|
|
d10c12035e | ||
|
|
5dd6f42550 | ||
|
|
4759b3d0b3 | ||
|
|
daeeba8c1f | ||
|
|
196bd52628 | ||
|
|
6c0061923a | ||
|
|
f33afe7ae5 | ||
|
|
aecf70ffad | ||
|
|
785f1f5eb1 | ||
|
|
193be3df36 | ||
|
|
116ee88fcf | ||
|
|
413db4bd30 | ||
|
|
095c2221c9 | ||
|
|
dfe891ce38 | ||
|
|
797bf50ec7 | ||
|
|
af5ca11860 | ||
|
|
f0cffbdc7a | ||
|
|
16905dd3a6 | ||
|
|
2ecdb99052 | ||
|
|
286f1be8ed | ||
|
|
bcefdd5261 | ||
|
|
d3459ecbc6 | ||
|
|
534a07788b | ||
|
|
239cdd6133 | ||
|
|
77cfc07dda | ||
|
|
728b38ac11 | ||
|
|
44774d0acd | ||
|
|
e204eb80a0 | ||
|
|
0c727c37f4 | ||
|
|
292bc0fc81 | ||
|
|
53f501e1a7 | ||
|
|
6cf7c88242 | ||
|
|
33774aa596 | ||
|
|
88087d5201 | ||
|
|
5d89682a3f | ||
|
|
bc15bd3d70 | ||
|
|
7c3aee8f3f | ||
|
|
c7c3bb57ea | ||
|
|
99dbad57d5 | ||
|
|
c42f79d406 | ||
|
|
4d8588b173 | ||
|
|
0d1d27064e | ||
|
|
0fe2c2c1c8 | ||
|
|
a8e4119270 | ||
|
|
372a4ebb42 | ||
|
|
4e945f5f56 | ||
|
|
6356f9c41d | ||
|
|
a1ee915ca5 | ||
|
|
c147a6b507 | ||
|
|
9d42b36f74 | ||
|
|
26a95af953 | ||
|
|
0ead267838 | ||
|
|
163763f9e5 | ||
|
|
6469d002b7 | ||
|
|
ab11e7ff7f | ||
|
|
7316dc753f | ||
|
|
1bf74e9d6c |
8
.github/workflows/build-plugin.yml
vendored
8
.github/workflows/build-plugin.yml
vendored
@@ -36,6 +36,8 @@ on:
|
||||
required: true
|
||||
CF_ENDPOINT:
|
||||
required: true
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN:
|
||||
required: false
|
||||
jobs:
|
||||
build-plugin:
|
||||
name: Build and Deploy Plugin
|
||||
@@ -97,7 +99,7 @@ jobs:
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
pattern: unraid-wc-rich
|
||||
path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/nuxt
|
||||
path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/standalone
|
||||
merge-multiple: true
|
||||
- name: Download Unraid API
|
||||
uses: actions/download-artifact@v5
|
||||
@@ -151,8 +153,8 @@ jobs:
|
||||
uses: the-actions-org/workflow-dispatch@v4.0.0
|
||||
with:
|
||||
workflow: release-production.yml
|
||||
inputs: '{ "version": "${{ steps.vars.outputs.API_VERSION }}" }'
|
||||
token: ${{ secrets.WORKFLOW_TRIGGER_PAT }}
|
||||
inputs: '{ "version": "v${{ steps.vars.outputs.API_VERSION }}" }'
|
||||
token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
- name: Upload to Cloudflare
|
||||
if: inputs.RELEASE_CREATED == 'false'
|
||||
|
||||
2
.github/workflows/deploy-storybook.yml
vendored
2
.github/workflows/deploy-storybook.yml
vendored
@@ -65,7 +65,7 @@ jobs:
|
||||
|
||||
- name: Comment PR with deployment URL
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: actions/github-script@v7
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
script: |
|
||||
github.rest.issues.createComment({
|
||||
|
||||
46
.github/workflows/main.yml
vendored
46
.github/workflows/main.yml
vendored
@@ -47,7 +47,7 @@ jobs:
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system php-cli
|
||||
version: 1.0
|
||||
|
||||
- name: Install pnpm
|
||||
@@ -117,42 +117,68 @@ jobs:
|
||||
# Verify libvirt is running using sudo to bypass group membership delays
|
||||
sudo virsh list --all || true
|
||||
|
||||
- uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: latest
|
||||
- name: Build UI Package First
|
||||
run: |
|
||||
echo "🔧 Building UI package for web tests dependency..."
|
||||
cd ../unraid-ui && pnpm run build
|
||||
|
||||
- name: Run Tests Concurrently
|
||||
run: |
|
||||
set -e
|
||||
|
||||
# Run all tests in parallel with labeled output
|
||||
# Run all tests in parallel with labeled output and coverage generation
|
||||
echo "🚀 Starting API coverage tests..."
|
||||
pnpm run coverage > api-test.log 2>&1 &
|
||||
API_PID=$!
|
||||
|
||||
echo "🚀 Starting Connect plugin tests..."
|
||||
(cd ../packages/unraid-api-plugin-connect && pnpm test) > connect-test.log 2>&1 &
|
||||
(cd ../packages/unraid-api-plugin-connect && pnpm test --coverage 2>/dev/null || pnpm test) > connect-test.log 2>&1 &
|
||||
CONNECT_PID=$!
|
||||
|
||||
echo "🚀 Starting Shared package tests..."
|
||||
(cd ../packages/unraid-shared && pnpm test) > shared-test.log 2>&1 &
|
||||
(cd ../packages/unraid-shared && pnpm test --coverage 2>/dev/null || pnpm test) > shared-test.log 2>&1 &
|
||||
SHARED_PID=$!
|
||||
|
||||
echo "🚀 Starting Web package coverage tests..."
|
||||
(cd ../web && (pnpm test --coverage || pnpm test)) > web-test.log 2>&1 &
|
||||
WEB_PID=$!
|
||||
|
||||
echo "🚀 Starting UI package coverage tests..."
|
||||
(cd ../unraid-ui && pnpm test --coverage 2>/dev/null || pnpm test) > ui-test.log 2>&1 &
|
||||
UI_PID=$!
|
||||
|
||||
echo "🚀 Starting Plugin tests..."
|
||||
(cd ../plugin && pnpm test) > plugin-test.log 2>&1 &
|
||||
PLUGIN_PID=$!
|
||||
|
||||
# Wait for all processes and capture exit codes
|
||||
wait $API_PID && echo "✅ API tests completed" || { echo "❌ API tests failed"; API_EXIT=1; }
|
||||
wait $CONNECT_PID && echo "✅ Connect tests completed" || { echo "❌ Connect tests failed"; CONNECT_EXIT=1; }
|
||||
wait $SHARED_PID && echo "✅ Shared tests completed" || { echo "❌ Shared tests failed"; SHARED_EXIT=1; }
|
||||
wait $WEB_PID && echo "✅ Web tests completed" || { echo "❌ Web tests failed"; WEB_EXIT=1; }
|
||||
wait $UI_PID && echo "✅ UI tests completed" || { echo "❌ UI tests failed"; UI_EXIT=1; }
|
||||
wait $PLUGIN_PID && echo "✅ Plugin tests completed" || { echo "❌ Plugin tests failed"; PLUGIN_EXIT=1; }
|
||||
|
||||
# Display all outputs
|
||||
echo "📋 API Test Results:" && cat api-test.log
|
||||
echo "📋 Connect Plugin Test Results:" && cat connect-test.log
|
||||
echo "📋 Shared Package Test Results:" && cat shared-test.log
|
||||
echo "📋 Web Package Test Results:" && cat web-test.log
|
||||
echo "📋 UI Package Test Results:" && cat ui-test.log
|
||||
echo "📋 Plugin Test Results:" && cat plugin-test.log
|
||||
|
||||
# Exit with error if any test failed
|
||||
if [[ ${API_EXIT:-0} -eq 1 || ${CONNECT_EXIT:-0} -eq 1 || ${SHARED_EXIT:-0} -eq 1 ]]; then
|
||||
if [[ ${API_EXIT:-0} -eq 1 || ${CONNECT_EXIT:-0} -eq 1 || ${SHARED_EXIT:-0} -eq 1 || ${WEB_EXIT:-0} -eq 1 || ${UI_EXIT:-0} -eq 1 || ${PLUGIN_EXIT:-0} -eq 1 ]]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Upload all coverage reports to Codecov
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
files: ./coverage/coverage-final.json,../web/coverage/coverage-final.json,../unraid-ui/coverage/coverage-final.json,../packages/unraid-api-plugin-connect/coverage/coverage-final.json,../packages/unraid-shared/coverage/coverage-final.json
|
||||
fail_ci_if_error: false
|
||||
|
||||
build-api:
|
||||
name: Build API
|
||||
runs-on: ubuntu-latest
|
||||
@@ -307,7 +333,6 @@ jobs:
|
||||
echo VITE_CONNECT=${{ secrets.VITE_CONNECT }} >> .env
|
||||
echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
|
||||
echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env
|
||||
cat .env
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
@@ -359,7 +384,7 @@ jobs:
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: unraid-wc-rich
|
||||
path: web/.nuxt/nuxt-custom-elements/dist/unraid-components
|
||||
path: web/dist
|
||||
|
||||
build-plugin-staging-pr:
|
||||
name: Build and Deploy Plugin
|
||||
@@ -404,3 +429,4 @@ jobs:
|
||||
CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
CF_BUCKET_PREVIEW: ${{ secrets.CF_BUCKET_PREVIEW }}
|
||||
CF_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
|
||||
UNRAID_BOT_GITHUB_ADMIN_TOKEN: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}
|
||||
|
||||
15
.github/workflows/release-production.yml
vendored
15
.github/workflows/release-production.yml
vendored
@@ -37,7 +37,7 @@ jobs:
|
||||
EOF
|
||||
- run: npm install html-escaper@2 xml2js
|
||||
- name: Update Plugin Changelog
|
||||
uses: actions/github-script@v7
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs');
|
||||
@@ -124,3 +124,16 @@ jobs:
|
||||
--no-guess-mime-type \
|
||||
--content-encoding none \
|
||||
--acl public-read
|
||||
|
||||
- name: Actions for Discord
|
||||
uses: Ilshidur/action-discord@0.4.0
|
||||
env:
|
||||
DISCORD_WEBHOOK: ${{ secrets.PUBLIC_DISCORD_RELEASE_ENDPOINT }}
|
||||
with:
|
||||
args: |
|
||||
🚀 **Unraid API Release ${{ inputs.version }}**
|
||||
|
||||
View Release: https://github.com/${{ github.repository }}/releases/tag/${{ inputs.version }}
|
||||
|
||||
**Changelog:**
|
||||
${{ steps.release-info.outputs.body }}
|
||||
|
||||
4
.github/workflows/test-libvirt.yml
vendored
4
.github/workflows/test-libvirt.yml
vendored
@@ -28,7 +28,7 @@ jobs:
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.13.6"
|
||||
python-version: "3.13.7"
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.3
|
||||
@@ -44,7 +44,7 @@ jobs:
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10.14.0
|
||||
version: 10.15.0
|
||||
run_install: false
|
||||
|
||||
- name: Get pnpm store directory
|
||||
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -29,6 +29,10 @@ unraid-ui/node_modules/
|
||||
# TypeScript v1 declaration files
|
||||
typings/
|
||||
|
||||
# Auto-generated type declarations for Nuxt UI
|
||||
auto-imports.d.ts
|
||||
components.d.ts
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
@@ -118,3 +122,4 @@ api/dev/Unraid.net/myservers.cfg
|
||||
|
||||
# local Mise settings
|
||||
.mise.toml
|
||||
|
||||
|
||||
@@ -1 +1 @@
|
||||
{".":"4.15.1"}
|
||||
{".":"4.20.1"}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
@custom-variant dark (&:where(.dark, .dark *));
|
||||
|
||||
@layer utilities {
|
||||
:host {
|
||||
/* Utility defaults for web components (when we were using shadow DOM) */
|
||||
:host {
|
||||
--tw-divide-y-reverse: 0;
|
||||
--tw-border-style: solid;
|
||||
--tw-font-weight: initial;
|
||||
@@ -48,21 +48,20 @@
|
||||
--tw-drop-shadow: initial;
|
||||
--tw-duration: initial;
|
||||
--tw-ease: initial;
|
||||
}
|
||||
}
|
||||
|
||||
@layer base {
|
||||
*,
|
||||
::after,
|
||||
::before,
|
||||
::backdrop,
|
||||
::file-selector-button {
|
||||
border-color: hsl(var(--border));
|
||||
}
|
||||
/* Global border color - this is what's causing the issue! */
|
||||
/* Commenting out since it affects all elements globally
|
||||
*,
|
||||
::after,
|
||||
::before,
|
||||
::backdrop,
|
||||
::file-selector-button {
|
||||
border-color: hsl(var(--border));
|
||||
}
|
||||
*/
|
||||
|
||||
|
||||
|
||||
body {
|
||||
body {
|
||||
--color-alpha: #1c1b1b;
|
||||
--color-beta: #f2f2f2;
|
||||
--color-gamma: #999999;
|
||||
@@ -74,8 +73,24 @@
|
||||
--ring-shadow: 0 0 var(--color-beta);
|
||||
}
|
||||
|
||||
button:not(:disabled),
|
||||
[role='button']:not(:disabled) {
|
||||
cursor: pointer;
|
||||
}
|
||||
button:not(:disabled),
|
||||
[role='button']:not(:disabled) {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
/* Font size overrides for SSO button component */
|
||||
unraid-sso-button {
|
||||
--text-xs: 0.75rem;
|
||||
--text-sm: 0.875rem;
|
||||
--text-base: 1rem;
|
||||
--text-lg: 1.125rem;
|
||||
--text-xl: 1.25rem;
|
||||
--text-2xl: 1.5rem;
|
||||
--text-3xl: 1.875rem;
|
||||
--text-4xl: 2.25rem;
|
||||
--text-5xl: 3rem;
|
||||
--text-6xl: 3.75rem;
|
||||
--text-7xl: 4.5rem;
|
||||
--text-8xl: 6rem;
|
||||
--text-9xl: 8rem;
|
||||
}
|
||||
@@ -1,7 +1,61 @@
|
||||
/* Hybrid theme system: Native CSS + Theme Store fallback */
|
||||
@layer base {
|
||||
/* Light mode defaults */
|
||||
:root {
|
||||
|
||||
/* Light mode defaults */
|
||||
:root {
|
||||
/* Nuxt UI Color System - Primary (Orange for Unraid) */
|
||||
--ui-color-primary-50: #fff7ed;
|
||||
--ui-color-primary-100: #ffedd5;
|
||||
--ui-color-primary-200: #fed7aa;
|
||||
--ui-color-primary-300: #fdba74;
|
||||
--ui-color-primary-400: #fb923c;
|
||||
--ui-color-primary-500: #ff8c2f;
|
||||
--ui-color-primary-600: #ea580c;
|
||||
--ui-color-primary-700: #c2410c;
|
||||
--ui-color-primary-800: #9a3412;
|
||||
--ui-color-primary-900: #7c2d12;
|
||||
--ui-color-primary-950: #431407;
|
||||
|
||||
/* Nuxt UI Color System - Neutral (True Gray) */
|
||||
--ui-color-neutral-50: #fafafa;
|
||||
--ui-color-neutral-100: #f5f5f5;
|
||||
--ui-color-neutral-200: #e5e5e5;
|
||||
--ui-color-neutral-300: #d4d4d4;
|
||||
--ui-color-neutral-400: #a3a3a3;
|
||||
--ui-color-neutral-500: #737373;
|
||||
--ui-color-neutral-600: #525252;
|
||||
--ui-color-neutral-700: #404040;
|
||||
--ui-color-neutral-800: #262626;
|
||||
--ui-color-neutral-900: #171717;
|
||||
--ui-color-neutral-950: #0a0a0a;
|
||||
|
||||
/* Nuxt UI Default color shades */
|
||||
--ui-primary: var(--ui-color-primary-500);
|
||||
--ui-secondary: var(--ui-color-neutral-500);
|
||||
|
||||
/* Nuxt UI Design Tokens - Text */
|
||||
--ui-text-dimmed: var(--ui-color-neutral-400);
|
||||
--ui-text-muted: var(--ui-color-neutral-500);
|
||||
--ui-text-toned: var(--ui-color-neutral-600);
|
||||
--ui-text: var(--ui-color-neutral-700);
|
||||
--ui-text-highlighted: var(--ui-color-neutral-900);
|
||||
--ui-text-inverted: white;
|
||||
|
||||
/* Nuxt UI Design Tokens - Background */
|
||||
--ui-bg: white;
|
||||
--ui-bg-muted: var(--ui-color-neutral-50);
|
||||
--ui-bg-elevated: var(--ui-color-neutral-100);
|
||||
--ui-bg-accented: var(--ui-color-neutral-200);
|
||||
--ui-bg-inverted: var(--ui-color-neutral-900);
|
||||
|
||||
/* Nuxt UI Design Tokens - Border */
|
||||
--ui-border: var(--ui-color-neutral-200);
|
||||
--ui-border-muted: var(--ui-color-neutral-200);
|
||||
--ui-border-accented: var(--ui-color-neutral-300);
|
||||
--ui-border-inverted: var(--ui-color-neutral-900);
|
||||
|
||||
/* Nuxt UI Radius */
|
||||
--ui-radius: 0.5rem;
|
||||
|
||||
--background: 0 0% 100%;
|
||||
--foreground: 0 0% 3.9%;
|
||||
--muted: 0 0% 96.1%;
|
||||
@@ -12,7 +66,7 @@
|
||||
--card-foreground: 0 0% 3.9%;
|
||||
--border: 0 0% 89.8%;
|
||||
--input: 0 0% 89.8%;
|
||||
--primary: 0 0% 9%;
|
||||
--primary: 24 100% 50%; /* Orange #ff8c2f in HSL */
|
||||
--primary-foreground: 0 0% 98%;
|
||||
--secondary: 0 0% 96.1%;
|
||||
--secondary-foreground: 0 0% 9%;
|
||||
@@ -20,7 +74,7 @@
|
||||
--accent-foreground: 0 0% 9%;
|
||||
--destructive: 0 84.2% 60.2%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--ring: 0 0% 3.9%;
|
||||
--ring: 24 100% 50%; /* Orange ring to match primary */
|
||||
--chart-1: 12 76% 61%;
|
||||
--chart-2: 173 58% 39%;
|
||||
--chart-3: 197 37% 24%;
|
||||
@@ -30,6 +84,31 @@
|
||||
|
||||
/* Dark mode */
|
||||
.dark {
|
||||
/* Nuxt UI Default color shades - Dark mode */
|
||||
--ui-primary: var(--ui-color-primary-400);
|
||||
--ui-secondary: var(--ui-color-neutral-400);
|
||||
|
||||
/* Nuxt UI Design Tokens - Text (Dark) */
|
||||
--ui-text-dimmed: var(--ui-color-neutral-500);
|
||||
--ui-text-muted: var(--ui-color-neutral-400);
|
||||
--ui-text-toned: var(--ui-color-neutral-300);
|
||||
--ui-text: var(--ui-color-neutral-200);
|
||||
--ui-text-highlighted: white;
|
||||
--ui-text-inverted: var(--ui-color-neutral-900);
|
||||
|
||||
/* Nuxt UI Design Tokens - Background (Dark) */
|
||||
--ui-bg: var(--ui-color-neutral-900);
|
||||
--ui-bg-muted: var(--ui-color-neutral-800);
|
||||
--ui-bg-elevated: var(--ui-color-neutral-800);
|
||||
--ui-bg-accented: var(--ui-color-neutral-700);
|
||||
--ui-bg-inverted: white;
|
||||
|
||||
/* Nuxt UI Design Tokens - Border (Dark) */
|
||||
--ui-border: var(--ui-color-neutral-800);
|
||||
--ui-border-muted: var(--ui-color-neutral-700);
|
||||
--ui-border-accented: var(--ui-color-neutral-700);
|
||||
--ui-border-inverted: white;
|
||||
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--muted: 0 0% 14.9%;
|
||||
@@ -40,15 +119,15 @@
|
||||
--card-foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
--input: 0 0% 14.9%;
|
||||
--primary: 0 0% 98%;
|
||||
--primary-foreground: 0 0% 9%;
|
||||
--primary: 24 100% 50%; /* Orange #ff8c2f in HSL */
|
||||
--primary-foreground: 0 0% 98%;
|
||||
--secondary: 0 0% 14.9%;
|
||||
--secondary-foreground: 0 0% 98%;
|
||||
--accent: 0 0% 14.9%;
|
||||
--accent-foreground: 0 0% 98%;
|
||||
--destructive: 0 62.8% 30.6%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--ring: 0 0% 83.1%;
|
||||
--ring: 24 100% 50%; /* Orange ring to match primary */
|
||||
--chart-1: 220 70% 50%;
|
||||
--chart-2: 160 60% 45%;
|
||||
--chart-3: 30 80% 55%;
|
||||
@@ -62,69 +141,4 @@
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
}
|
||||
|
||||
/* For web components: inherit CSS variables from the host */
|
||||
:host {
|
||||
--background: inherit;
|
||||
--foreground: inherit;
|
||||
--muted: inherit;
|
||||
--muted-foreground: inherit;
|
||||
--popover: inherit;
|
||||
--popover-foreground: inherit;
|
||||
--card: inherit;
|
||||
--card-foreground: inherit;
|
||||
--border: inherit;
|
||||
--input: inherit;
|
||||
--primary: inherit;
|
||||
--primary-foreground: inherit;
|
||||
--secondary: inherit;
|
||||
--secondary-foreground: inherit;
|
||||
--accent: inherit;
|
||||
--accent-foreground: inherit;
|
||||
--destructive: inherit;
|
||||
--destructive-foreground: inherit;
|
||||
--ring: inherit;
|
||||
--chart-1: inherit;
|
||||
--chart-2: inherit;
|
||||
--chart-3: inherit;
|
||||
--chart-4: inherit;
|
||||
--chart-5: inherit;
|
||||
}
|
||||
|
||||
/* Class-based dark mode support for web components using :host-context */
|
||||
:host-context(.dark) {
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--muted: 0 0% 14.9%;
|
||||
--muted-foreground: 0 0% 63.9%;
|
||||
--popover: 0 0% 3.9%;
|
||||
--popover-foreground: 0 0% 98%;
|
||||
--card: 0 0% 3.9%;
|
||||
--card-foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
--input: 0 0% 14.9%;
|
||||
--primary: 0 0% 98%;
|
||||
--primary-foreground: 0 0% 9%;
|
||||
--secondary: 0 0% 14.9%;
|
||||
--secondary-foreground: 0 0% 98%;
|
||||
--accent: 0 0% 14.9%;
|
||||
--accent-foreground: 0 0% 98%;
|
||||
--destructive: 0 62.8% 30.6%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--ring: 0 0% 83.1%;
|
||||
--chart-1: 220 70% 50%;
|
||||
--chart-2: 160 60% 45%;
|
||||
--chart-3: 30 80% 55%;
|
||||
--chart-4: 280 65% 60%;
|
||||
--chart-5: 340 75% 55%;
|
||||
}
|
||||
|
||||
/* Alternative class-based dark mode support for specific Unraid themes */
|
||||
:host-context(.dark[data-theme='black']),
|
||||
:host-context(.dark[data-theme='gray']) {
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
/* Tailwind Shared Styles - Single entry point for all shared CSS */
|
||||
@import './css-variables.css';
|
||||
@import './unraid-theme.css';
|
||||
@import './theme-variants.css';
|
||||
@import './base-utilities.css';
|
||||
@import './sonner.css';
|
||||
@import './sonner.css';
|
||||
|
||||
@@ -229,12 +229,14 @@
|
||||
top: 0;
|
||||
height: 20px;
|
||||
width: 20px;
|
||||
min-width: inherit !important;
|
||||
margin: 0 !important;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
padding: 0;
|
||||
color: var(--gray12);
|
||||
border: 1px solid var(--gray4);
|
||||
color: hsl(var(--foreground));
|
||||
border: 1px solid hsl(var(--border));
|
||||
transform: var(--toast-close-button-transform);
|
||||
border-radius: 50%;
|
||||
cursor: pointer;
|
||||
@@ -243,7 +245,7 @@
|
||||
}
|
||||
|
||||
[data-sonner-toast] [data-close-button] {
|
||||
background: var(--gray1);
|
||||
background: hsl(var(--background));
|
||||
}
|
||||
|
||||
:where([data-sonner-toast]) :where([data-close-button]):focus-visible {
|
||||
@@ -255,8 +257,8 @@
|
||||
}
|
||||
|
||||
[data-sonner-toast]:hover [data-close-button]:hover {
|
||||
background: var(--gray2);
|
||||
border-color: var(--gray5);
|
||||
background: hsl(var(--muted));
|
||||
border-color: hsl(var(--border));
|
||||
}
|
||||
|
||||
/* Leave a ghost div to avoid setting hover to false when swiping out */
|
||||
@@ -414,10 +416,27 @@
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-theme='light'] {
|
||||
--normal-bg: #fff;
|
||||
--normal-border: var(--gray4);
|
||||
--normal-text: var(--gray12);
|
||||
--normal-bg: hsl(var(--background));
|
||||
--normal-border: hsl(var(--border));
|
||||
--normal-text: hsl(var(--foreground));
|
||||
|
||||
--success-bg: hsl(var(--background));
|
||||
--success-border: hsl(var(--border));
|
||||
--success-text: hsl(140, 100%, 27%);
|
||||
|
||||
--info-bg: hsl(var(--background));
|
||||
--info-border: hsl(var(--border));
|
||||
--info-text: hsl(210, 92%, 45%);
|
||||
|
||||
--warning-bg: hsl(var(--background));
|
||||
--warning-border: hsl(var(--border));
|
||||
--warning-text: hsl(31, 92%, 45%);
|
||||
|
||||
--error-bg: hsl(var(--background));
|
||||
--error-border: hsl(var(--border));
|
||||
--error-text: hsl(360, 100%, 45%);
|
||||
|
||||
/* Old colors, preserved for reference
|
||||
--success-bg: hsl(143, 85%, 96%);
|
||||
--success-border: hsl(145, 92%, 91%);
|
||||
--success-text: hsl(140, 100%, 27%);
|
||||
@@ -432,26 +451,43 @@
|
||||
|
||||
--error-bg: hsl(359, 100%, 97%);
|
||||
--error-border: hsl(359, 100%, 94%);
|
||||
--error-text: hsl(360, 100%, 45%);
|
||||
--error-text: hsl(360, 100%, 45%); */
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-theme='light'] [data-sonner-toast][data-invert='true'] {
|
||||
--normal-bg: #000;
|
||||
--normal-border: hsl(0, 0%, 20%);
|
||||
--normal-text: var(--gray1);
|
||||
--normal-bg: hsl(0 0% 3.9%);
|
||||
--normal-border: hsl(0 0% 14.9%);
|
||||
--normal-text: hsl(0 0% 98%);
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-theme='dark'] [data-sonner-toast][data-invert='true'] {
|
||||
--normal-bg: #fff;
|
||||
--normal-border: var(--gray3);
|
||||
--normal-text: var(--gray12);
|
||||
--normal-bg: hsl(0 0% 100%);
|
||||
--normal-border: hsl(0 0% 89.8%);
|
||||
--normal-text: hsl(0 0% 3.9%);
|
||||
}
|
||||
|
||||
[data-sonner-toaster][data-theme='dark'] {
|
||||
--normal-bg: #000;
|
||||
--normal-border: hsl(0, 0%, 20%);
|
||||
--normal-text: var(--gray1);
|
||||
--normal-bg: hsl(var(--background));
|
||||
--normal-border: hsl(var(--border));
|
||||
--normal-text: hsl(var(--foreground));
|
||||
|
||||
--success-bg: hsl(var(--background));
|
||||
--success-border: hsl(var(--border));
|
||||
--success-text: hsl(150, 86%, 65%);
|
||||
|
||||
--info-bg: hsl(var(--background));
|
||||
--info-border: hsl(var(--border));
|
||||
--info-text: hsl(216, 87%, 65%);
|
||||
|
||||
--warning-bg: hsl(var(--background));
|
||||
--warning-border: hsl(var(--border));
|
||||
--warning-text: hsl(46, 87%, 65%);
|
||||
|
||||
--error-bg: hsl(var(--background));
|
||||
--error-border: hsl(var(--border));
|
||||
--error-text: hsl(358, 100%, 81%);
|
||||
|
||||
/* Old colors, preserved for reference
|
||||
--success-bg: hsl(150, 100%, 6%);
|
||||
--success-border: hsl(147, 100%, 12%);
|
||||
--success-text: hsl(150, 86%, 65%);
|
||||
@@ -466,7 +502,7 @@
|
||||
|
||||
--error-bg: hsl(358, 76%, 10%);
|
||||
--error-border: hsl(357, 89%, 16%);
|
||||
--error-text: hsl(358, 100%, 81%);
|
||||
--error-text: hsl(358, 100%, 81%); */
|
||||
}
|
||||
|
||||
[data-rich-colors='true'][data-sonner-toast][data-type='success'] {
|
||||
@@ -541,7 +577,7 @@
|
||||
|
||||
.sonner-loading-bar {
|
||||
animation: sonner-spin 1.2s linear infinite;
|
||||
background: var(--gray11);
|
||||
background: hsl(var(--muted-foreground));
|
||||
border-radius: 6px;
|
||||
height: 8%;
|
||||
left: -10%;
|
||||
@@ -662,4 +698,11 @@
|
||||
.sonner-loader[data-visible='false'] {
|
||||
opacity: 0;
|
||||
transform: scale(0.8) translate(-50%, -50%);
|
||||
}
|
||||
|
||||
/* Override Unraid webgui docker icon styles on sonner containers */
|
||||
[data-sonner-toast] [data-icon]:before,
|
||||
[data-sonner-toast] .fa-docker:before {
|
||||
font-family: inherit !important;
|
||||
content: '' !important;
|
||||
}
|
||||
96
@tailwind-shared/theme-variants.css
Normal file
96
@tailwind-shared/theme-variants.css
Normal file
@@ -0,0 +1,96 @@
|
||||
/**
|
||||
* Tailwind v4 Theme Variants
|
||||
* Defines theme-specific CSS variables that can be switched via classes
|
||||
* These are applied dynamically based on the theme selected in GraphQL
|
||||
*/
|
||||
|
||||
/* Default/White Theme */
|
||||
:root,
|
||||
.theme-white {
|
||||
--header-text-primary: #ffffff;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #1c1b1b;
|
||||
--header-gradient-start: rgba(28, 27, 27, 0);
|
||||
--header-gradient-end: rgba(28, 27, 27, 0.7);
|
||||
--ui-border-muted: hsl(240 5% 20%);
|
||||
--color-border: #383735;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #1c1b1b;
|
||||
--color-gamma: #ffffff;
|
||||
--color-gamma-opaque: rgba(255, 255, 255, 0.3);
|
||||
}
|
||||
|
||||
/* Black Theme */
|
||||
.theme-black,
|
||||
.theme-black.dark {
|
||||
--header-text-primary: #1c1b1b;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #f2f2f2;
|
||||
--header-gradient-start: rgba(242, 242, 242, 0);
|
||||
--header-gradient-end: rgba(242, 242, 242, 0.7);
|
||||
--ui-border-muted: hsl(240 5.9% 90%);
|
||||
--color-border: #e0e0e0;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #f2f2f2;
|
||||
--color-gamma: #1c1b1b;
|
||||
--color-gamma-opaque: rgba(28, 27, 27, 0.3);
|
||||
}
|
||||
|
||||
/* Gray Theme */
|
||||
.theme-gray {
|
||||
--header-text-primary: #ffffff;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #1c1b1b;
|
||||
--header-gradient-start: rgba(28, 27, 27, 0);
|
||||
--header-gradient-end: rgba(28, 27, 27, 0.7);
|
||||
--ui-border-muted: hsl(240 5% 25%);
|
||||
--color-border: #383735;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #383735;
|
||||
--color-gamma: #ffffff;
|
||||
--color-gamma-opaque: rgba(255, 255, 255, 0.3);
|
||||
}
|
||||
|
||||
/* Azure Theme */
|
||||
.theme-azure {
|
||||
--header-text-primary: #1c1b1b;
|
||||
--header-text-secondary: #999999;
|
||||
--header-background-color: #f2f2f2;
|
||||
--header-gradient-start: rgba(242, 242, 242, 0);
|
||||
--header-gradient-end: rgba(242, 242, 242, 0.7);
|
||||
--ui-border-muted: hsl(210 40% 80%);
|
||||
--color-border: #5a8bb8;
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #e7f2f8;
|
||||
--color-gamma: #336699;
|
||||
--color-gamma-opaque: rgba(51, 102, 153, 0.3);
|
||||
}
|
||||
|
||||
/* Dark Mode Overrides */
|
||||
.dark {
|
||||
--ui-border-muted: hsl(240 5% 20%);
|
||||
--color-border: #383735;
|
||||
}
|
||||
|
||||
/*
|
||||
* Dynamic color variables for user overrides from GraphQL
|
||||
* These are set via JavaScript and override the theme defaults
|
||||
*/
|
||||
.has-custom-header-text {
|
||||
--header-text-primary: var(--custom-header-text-primary);
|
||||
--color-header-text-primary: var(--custom-header-text-primary);
|
||||
}
|
||||
|
||||
.has-custom-header-meta {
|
||||
--header-text-secondary: var(--custom-header-text-secondary);
|
||||
--color-header-text-secondary: var(--custom-header-text-secondary);
|
||||
}
|
||||
|
||||
.has-custom-header-bg {
|
||||
--header-background-color: var(--custom-header-background-color);
|
||||
--color-header-background: var(--custom-header-background-color);
|
||||
--header-gradient-start: var(--custom-header-gradient-start);
|
||||
--header-gradient-end: var(--custom-header-gradient-end);
|
||||
--color-header-gradient-start: var(--custom-header-gradient-start);
|
||||
--color-header-gradient-end: var(--custom-header-gradient-end);
|
||||
}
|
||||
@@ -84,23 +84,23 @@
|
||||
--color-primary-900: #7c2d12;
|
||||
--color-primary-950: #431407;
|
||||
|
||||
/* Header colors */
|
||||
--color-header-text-primary: var(--header-text-primary);
|
||||
--color-header-text-secondary: var(--header-text-secondary);
|
||||
--color-header-background-color: var(--header-background-color);
|
||||
/* Header colors - defaults will be overridden by theme */
|
||||
--color-header-text-primary: var(--header-text-primary, #1c1c1c);
|
||||
--color-header-text-secondary: var(--header-text-secondary, #999999);
|
||||
--color-header-background: var(--header-background-color, #f2f2f2);
|
||||
|
||||
/* Legacy colors */
|
||||
--color-alpha: var(--color-alpha);
|
||||
--color-beta: var(--color-beta);
|
||||
--color-gamma: var(--color-gamma);
|
||||
--color-gamma-opaque: var(--color-gamma-opaque);
|
||||
--color-customgradient-start: var(--color-customgradient-start);
|
||||
--color-customgradient-end: var(--color-customgradient-end);
|
||||
/* Legacy colors - defaults (overridden by theme-variants.css) */
|
||||
--color-alpha: #ff8c2f;
|
||||
--color-beta: #f2f2f2;
|
||||
--color-gamma: #999999;
|
||||
--color-gamma-opaque: rgba(153, 153, 153, 0.5);
|
||||
--color-customgradient-start: rgba(242, 242, 242, 0);
|
||||
--color-customgradient-end: rgba(242, 242, 242, 0.85);
|
||||
|
||||
/* Gradients */
|
||||
--color-header-gradient-start: var(--header-gradient-start);
|
||||
--color-header-gradient-end: var(--header-gradient-end);
|
||||
--color-banner-gradient: var(--banner-gradient);
|
||||
/* Gradients - defaults (overridden by theme-variants.css) */
|
||||
--color-header-gradient-start: rgba(242, 242, 242, 0);
|
||||
--color-header-gradient-end: rgba(242, 242, 242, 0.85);
|
||||
--color-banner-gradient: none;
|
||||
|
||||
/* Font sizes */
|
||||
--font-10px: 10px;
|
||||
@@ -167,6 +167,27 @@
|
||||
--max-width-800px: 800px;
|
||||
--max-width-1024px: 1024px;
|
||||
|
||||
/* Container sizes adjusted for 10px base font size (1.6x scale) */
|
||||
--container-xs: 32rem;
|
||||
--container-sm: 38.4rem;
|
||||
--container-md: 44.8rem;
|
||||
--container-lg: 51.2rem;
|
||||
--container-xl: 57.6rem;
|
||||
--container-2xl: 67.2rem;
|
||||
--container-3xl: 76.8rem;
|
||||
--container-4xl: 89.6rem;
|
||||
--container-5xl: 102.4rem;
|
||||
--container-6xl: 115.2rem;
|
||||
--container-7xl: 128rem;
|
||||
|
||||
/* Extended width scale for max-w-* utilities */
|
||||
--width-5xl: 102.4rem;
|
||||
--width-6xl: 115.2rem;
|
||||
--width-7xl: 128rem;
|
||||
--width-8xl: 140.8rem;
|
||||
--width-9xl: 153.6rem;
|
||||
--width-10xl: 166.4rem;
|
||||
|
||||
/* Animations */
|
||||
--animate-mark-2: mark-2 1.5s ease infinite;
|
||||
--animate-mark-3: mark-3 1.5s ease infinite;
|
||||
|
||||
@@ -157,4 +157,7 @@ Enables GraphQL playground at `http://tower.local/graphql`
|
||||
|
||||
- We are using tailwind v4 we do not need a tailwind config anymore
|
||||
- always search the internet for tailwind v4 documentation when making tailwind related style changes
|
||||
- never run or restart the API server or web server. I will handle the lifecylce, simply wait and ask me to do this for you
|
||||
- never run or restart the API server or web server. I will handle the lifecycle, simply wait and ask me to do this for you
|
||||
- Never use the `any` type. Always prefer proper typing
|
||||
- Avoid using casting whenever possible, prefer proper typing from the start
|
||||
- **IMPORTANT:** cache-manager v7 expects TTL values in **milliseconds**, not seconds. Always use milliseconds when setting cache TTL (e.g., 600000 for 10 minutes, not 600)
|
||||
|
||||
114
api/CHANGELOG.md
114
api/CHANGELOG.md
@@ -1,5 +1,119 @@
|
||||
# Changelog
|
||||
|
||||
## [4.20.1](https://github.com/unraid/api/compare/v4.20.0...v4.20.1) (2025-09-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* adjust header styles to fix flashing and width issues - thanks ZarZ ([4759b3d](https://github.com/unraid/api/commit/4759b3d0b3fb6bc71636f75f807cd6f4f62305d1))
|
||||
|
||||
## [4.20.0](https://github.com/unraid/api/compare/v4.19.1...v4.20.0) (2025-09-08)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **disks:** add isSpinning field to Disk type ([#1527](https://github.com/unraid/api/issues/1527)) ([193be3d](https://github.com/unraid/api/commit/193be3df3672514be9904e3d4fbdff776470afc0))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* better component loading to prevent per-page strange behavior ([095c222](https://github.com/unraid/api/commit/095c2221c94f144f8ad410a69362b15803765531))
|
||||
* **deps:** pin dependencies ([#1669](https://github.com/unraid/api/issues/1669)) ([413db4b](https://github.com/unraid/api/commit/413db4bd30a06aa69d3ca86e793782854f822589))
|
||||
* **plugin:** add fallback for unraid-api stop in deprecation cleanup ([#1668](https://github.com/unraid/api/issues/1668)) ([797bf50](https://github.com/unraid/api/commit/797bf50ec702ebc8244ff71a8ef1a80ea5cd2169))
|
||||
* prepend 'v' to API version in workflow dispatch inputs ([f0cffbd](https://github.com/unraid/api/commit/f0cffbdc7ac36e7037ab60fe9dddbb2cab4a5e10))
|
||||
* progress frame background color fix ([#1672](https://github.com/unraid/api/issues/1672)) ([785f1f5](https://github.com/unraid/api/commit/785f1f5eb1a1cc8b41f6eb502e4092d149cfbd80))
|
||||
* properly override header values ([#1673](https://github.com/unraid/api/issues/1673)) ([aecf70f](https://github.com/unraid/api/commit/aecf70ffad60c83074347d3d6ec23f73acbd1aee))
|
||||
|
||||
## [4.19.1](https://github.com/unraid/api/compare/v4.19.0...v4.19.1) (2025-09-05)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* custom path detection to fix setup issues ([#1664](https://github.com/unraid/api/issues/1664)) ([2ecdb99](https://github.com/unraid/api/commit/2ecdb99052f39d89af21bbe7ad3f80b83bb1eaa9))
|
||||
|
||||
## [4.19.0](https://github.com/unraid/api/compare/v4.18.2...v4.19.0) (2025-09-04)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* mount vue apps, not web components ([#1639](https://github.com/unraid/api/issues/1639)) ([88087d5](https://github.com/unraid/api/commit/88087d5201992298cdafa791d5d1b5bb23dcd72b))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* api version json response ([#1653](https://github.com/unraid/api/issues/1653)) ([292bc0f](https://github.com/unraid/api/commit/292bc0fc810a0d0f0cce6813b0631ff25099cc05))
|
||||
* enhance DOM validation and cleanup in vue-mount-app ([6cf7c88](https://github.com/unraid/api/commit/6cf7c88242f2f4fe9f83871560039767b5b90273))
|
||||
* enhance getKeyFile function to handle missing key file gracefully ([#1659](https://github.com/unraid/api/issues/1659)) ([728b38a](https://github.com/unraid/api/commit/728b38ac11faeacd39ce9d0157024ad140e29b36))
|
||||
* info alert docker icon ([#1661](https://github.com/unraid/api/issues/1661)) ([239cdd6](https://github.com/unraid/api/commit/239cdd6133690699348e61f68e485d2b54fdcbdb))
|
||||
* oidc cache busting issues fixed ([#1656](https://github.com/unraid/api/issues/1656)) ([e204eb8](https://github.com/unraid/api/commit/e204eb80a00ab9242e3dca4ccfc3e1b55a7694b7))
|
||||
* **plugin:** restore cleanup behavior for unsupported unraid versions ([#1658](https://github.com/unraid/api/issues/1658)) ([534a077](https://github.com/unraid/api/commit/534a07788b76de49e9ba14059a9aed0bf16e02ca))
|
||||
* UnraidToaster component and update dialog close button ([#1657](https://github.com/unraid/api/issues/1657)) ([44774d0](https://github.com/unraid/api/commit/44774d0acdd25aa33cb60a5d0b4f80777f4068e5))
|
||||
* vue mounting logic with tests ([#1651](https://github.com/unraid/api/issues/1651)) ([33774aa](https://github.com/unraid/api/commit/33774aa596124a031a7452b62ca4c43743a09951))
|
||||
|
||||
## [4.18.2](https://github.com/unraid/api/compare/v4.18.1...v4.18.2) (2025-09-03)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add missing CPU guest metrics to CPU responses ([#1644](https://github.com/unraid/api/issues/1644)) ([99dbad5](https://github.com/unraid/api/commit/99dbad57d55a256f5f3f850f9a47a6eaa6348065))
|
||||
* **plugin:** raise minimum unraid os version to 6.12.15 ([#1649](https://github.com/unraid/api/issues/1649)) ([bc15bd3](https://github.com/unraid/api/commit/bc15bd3d7008acb416ac3c6fb1f4724c685ec7e7))
|
||||
* update GitHub Actions token for workflow trigger ([4d8588b](https://github.com/unraid/api/commit/4d8588b17331afa45ba8caf84fcec8c0ea03591f))
|
||||
* update OIDC URL validation and add tests ([#1646](https://github.com/unraid/api/issues/1646)) ([c7c3bb5](https://github.com/unraid/api/commit/c7c3bb57ea482633a7acff064b39fbc8d4e07213))
|
||||
* use shared bg & border color for styled toasts ([#1647](https://github.com/unraid/api/issues/1647)) ([7c3aee8](https://github.com/unraid/api/commit/7c3aee8f3f9ba82ae8c8ed3840c20ab47f3cb00f))
|
||||
|
||||
## [4.18.1](https://github.com/unraid/api/compare/v4.18.0...v4.18.1) (2025-09-03)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* OIDC and API Key management issues ([#1642](https://github.com/unraid/api/issues/1642)) ([0fe2c2c](https://github.com/unraid/api/commit/0fe2c2c1c85dcc547e4b1217a3b5636d7dd6d4b4))
|
||||
* rm redundant emission to `$HOME/.pm2/logs` ([#1640](https://github.com/unraid/api/issues/1640)) ([a8e4119](https://github.com/unraid/api/commit/a8e4119270868a1dabccd405853a7340f8dcd8a5))
|
||||
|
||||
## [4.18.0](https://github.com/unraid/api/compare/v4.17.0...v4.18.0) (2025-09-02)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **api:** enhance OIDC redirect URI handling in service and tests ([#1618](https://github.com/unraid/api/issues/1618)) ([4e945f5](https://github.com/unraid/api/commit/4e945f5f56ce059eb275a9576caf3194a5df8a90))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* api key creation cli ([#1637](https://github.com/unraid/api/issues/1637)) ([c147a6b](https://github.com/unraid/api/commit/c147a6b5075969e77798210c4a5cfd1fa5b96ae3))
|
||||
* **cli:** support `--log-level` for `start` and `restart` cmds ([#1623](https://github.com/unraid/api/issues/1623)) ([a1ee915](https://github.com/unraid/api/commit/a1ee915ca52e5a063eccf8facbada911a63f37f6))
|
||||
* confusing server -> status query ([#1635](https://github.com/unraid/api/issues/1635)) ([9d42b36](https://github.com/unraid/api/commit/9d42b36f74274cad72490da5152fdb98fdc5b89b))
|
||||
* use unraid css variables in sonner ([#1634](https://github.com/unraid/api/issues/1634)) ([26a95af](https://github.com/unraid/api/commit/26a95af9539d05a837112d62dc6b7dd46761c83f))
|
||||
|
||||
## [4.17.0](https://github.com/unraid/api/compare/v4.16.0...v4.17.0) (2025-08-27)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add tailwind class sort plugin ([#1562](https://github.com/unraid/api/issues/1562)) ([ab11e7f](https://github.com/unraid/api/commit/ab11e7ff7ff74da1f1cd5e49938459d00bfc846b))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* cleanup obsoleted legacy api keys on api startup (cli / connect) ([#1630](https://github.com/unraid/api/issues/1630)) ([6469d00](https://github.com/unraid/api/commit/6469d002b7b18e49c77ee650a4255974ab43e790))
|
||||
|
||||
## [4.16.0](https://github.com/unraid/api/compare/v4.15.1...v4.16.0) (2025-08-27)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add `parityCheckStatus` field to `array` query ([#1611](https://github.com/unraid/api/issues/1611)) ([c508366](https://github.com/unraid/api/commit/c508366702b9fa20d9ed05559fe73da282116aa6))
|
||||
* generated UI API key management + OAuth-like API Key Flows ([#1609](https://github.com/unraid/api/issues/1609)) ([674323f](https://github.com/unraid/api/commit/674323fd87bbcc55932e6b28f6433a2de79b7ab0))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **connect:** clear `wanport` upon disabling remote access ([#1624](https://github.com/unraid/api/issues/1624)) ([9df6a3f](https://github.com/unraid/api/commit/9df6a3f5ebb0319aa7e3fe3be6159d39ec6f587f))
|
||||
* **connect:** valid LAN FQDN while remote access is enabled ([#1625](https://github.com/unraid/api/issues/1625)) ([aa58888](https://github.com/unraid/api/commit/aa588883cc2e2fe4aa4aea1d035236c888638f5b))
|
||||
* correctly parse periods in share names from ini file ([#1629](https://github.com/unraid/api/issues/1629)) ([7d67a40](https://github.com/unraid/api/commit/7d67a404333a38d6e1ba5c3febf02be8b1b71901))
|
||||
* **rc.unraid-api:** remove profile sourcing ([#1622](https://github.com/unraid/api/issues/1622)) ([6947b5d](https://github.com/unraid/api/commit/6947b5d4aff70319116eb65cf4c639444f3749e9))
|
||||
* remove unused api key calls ([#1628](https://github.com/unraid/api/issues/1628)) ([9cd0d6a](https://github.com/unraid/api/commit/9cd0d6ac658475efa25683ef6e3f2e1d68f7e903))
|
||||
* retry VMs init for up to 2 min ([#1612](https://github.com/unraid/api/issues/1612)) ([b2e7801](https://github.com/unraid/api/commit/b2e78012384e6b3f2630341281fc811026be23b9))
|
||||
|
||||
## [4.15.1](https://github.com/unraid/api/compare/v4.15.0...v4.15.1) (2025-08-20)
|
||||
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"version": "4.15.1",
|
||||
"version": "4.19.1",
|
||||
"extraOrigins": [],
|
||||
"sandbox": true,
|
||||
"ssoSubIds": [],
|
||||
|
||||
@@ -17,5 +17,6 @@
|
||||
],
|
||||
"buttonText": "Login With Unraid.net"
|
||||
}
|
||||
]
|
||||
],
|
||||
"defaultAllowedOrigins": []
|
||||
}
|
||||
@@ -21,7 +21,14 @@ unraid-api start [--log-level <level>]
|
||||
Starts the Unraid API service.
|
||||
|
||||
Options:
|
||||
- `--log-level`: Set logging level (trace|debug|info|warn|error)
|
||||
|
||||
- `--log-level`: Set logging level (trace|debug|info|warn|error|fatal)
|
||||
|
||||
Alternative: You can also set the log level using the `LOG_LEVEL` environment variable:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=trace unraid-api start
|
||||
```
|
||||
|
||||
### Stop
|
||||
|
||||
@@ -36,11 +43,21 @@ Stops the Unraid API service.
|
||||
### Restart
|
||||
|
||||
```bash
|
||||
unraid-api restart
|
||||
unraid-api restart [--log-level <level>]
|
||||
```
|
||||
|
||||
Restarts the Unraid API service.
|
||||
|
||||
Options:
|
||||
|
||||
- `--log-level`: Set logging level (trace|debug|info|warn|error|fatal)
|
||||
|
||||
Alternative: You can also set the log level using the `LOG_LEVEL` environment variable:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=trace unraid-api restart
|
||||
```
|
||||
|
||||
### Logs
|
||||
|
||||
```bash
|
||||
|
||||
252
api/docs/public/programmatic-api-key-management.md
Normal file
252
api/docs/public/programmatic-api-key-management.md
Normal file
@@ -0,0 +1,252 @@
|
||||
---
|
||||
title: Programmatic API Key Management
|
||||
description: Create, use, and delete API keys programmatically for automated workflows
|
||||
sidebar_position: 4
|
||||
---
|
||||
|
||||
# Programmatic API Key Management
|
||||
|
||||
This guide explains how to create, use, and delete API keys programmatically using the Unraid API CLI, enabling automated workflows and scripts.
|
||||
|
||||
## Overview
|
||||
|
||||
The `unraid-api apikey` command supports both interactive and non-interactive modes, making it suitable for:
|
||||
|
||||
- Automated deployment scripts
|
||||
- CI/CD pipelines
|
||||
- Temporary access provisioning
|
||||
- Infrastructure as code workflows
|
||||
|
||||
:::tip[Quick Start]
|
||||
Jump to the [Complete Workflow Example](#complete-workflow-example) to see everything in action.
|
||||
:::
|
||||
|
||||
## Creating API Keys Programmatically
|
||||
|
||||
### Basic Creation with JSON Output
|
||||
|
||||
Use the `--json` flag to get machine-readable output:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create --name "workflow key" --roles ADMIN --json
|
||||
```
|
||||
|
||||
**Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"key": "your-generated-api-key-here",
|
||||
"name": "workflow key",
|
||||
"id": "generated-uuid"
|
||||
}
|
||||
```
|
||||
|
||||
### Advanced Creation with Permissions
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create \
|
||||
--name "limited access key" \
|
||||
--permissions "DOCKER:READ_ANY,ARRAY:READ_ANY" \
|
||||
--description "Read-only access for monitoring" \
|
||||
--json
|
||||
```
|
||||
|
||||
### Handling Existing Keys
|
||||
|
||||
If a key with the same name exists, use `--overwrite`:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --create --name "existing key" --roles ADMIN --overwrite --json
|
||||
```
|
||||
|
||||
:::warning[Key Replacement]
|
||||
The `--overwrite` flag will permanently replace the existing key. The old key will be immediately invalidated.
|
||||
:::
|
||||
|
||||
## Deleting API Keys Programmatically
|
||||
|
||||
### Non-Interactive Deletion
|
||||
|
||||
Delete a key by name without prompts:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --delete --name "workflow key"
|
||||
```
|
||||
|
||||
**Output:**
|
||||
|
||||
```
|
||||
Successfully deleted 1 API key
|
||||
```
|
||||
|
||||
### JSON Output for Deletion
|
||||
|
||||
Use `--json` flag for machine-readable delete confirmation:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --delete --name "workflow key" --json
|
||||
```
|
||||
|
||||
**Success Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 1,
|
||||
"keys": [
|
||||
{
|
||||
"id": "generated-uuid",
|
||||
"name": "workflow key"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**Error Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 0,
|
||||
"error": "No API key found with name: nonexistent key"
|
||||
}
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
When the specified key doesn't exist:
|
||||
|
||||
```bash
|
||||
unraid-api apikey --delete --name "nonexistent key"
|
||||
# Output: No API keys found to delete
|
||||
```
|
||||
|
||||
**JSON Error Output:**
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 0,
|
||||
"message": "No API keys found to delete"
|
||||
}
|
||||
```
|
||||
|
||||
## Complete Workflow Example
|
||||
|
||||
Here's a complete example for temporary access provisioning:
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# 1. Create temporary API key
|
||||
echo "Creating temporary API key..."
|
||||
KEY_DATA=$(unraid-api apikey --create \
|
||||
--name "temp deployment key" \
|
||||
--roles ADMIN \
|
||||
--description "Temporary key for deployment $(date)" \
|
||||
--json)
|
||||
|
||||
# 2. Extract the API key
|
||||
API_KEY=$(echo "$KEY_DATA" | jq -r '.key')
|
||||
echo "API key created successfully"
|
||||
|
||||
# 3. Use the key for operations
|
||||
echo "Configuring services..."
|
||||
curl -H "Authorization: Bearer $API_KEY" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"provider": "azure", "clientId": "your-client-id"}' \
|
||||
http://localhost:3001/graphql
|
||||
|
||||
# 4. Clean up (always runs, even on error)
|
||||
trap 'echo "Cleaning up..."; unraid-api apikey --delete --name "temp deployment key"' EXIT
|
||||
|
||||
echo "Deployment completed successfully"
|
||||
```
|
||||
|
||||
## Command Reference
|
||||
|
||||
### Create Command Options
|
||||
|
||||
| Flag | Description | Example |
|
||||
| ----------------------- | ----------------------- | --------------------------------- |
|
||||
| `--name <name>` | Key name (required) | `--name "my key"` |
|
||||
| `--roles <roles>` | Comma-separated roles | `--roles ADMIN,VIEWER` |
|
||||
| `--permissions <perms>` | Resource:action pairs | `--permissions "DOCKER:READ_ANY"` |
|
||||
| `--description <desc>` | Key description | `--description "CI/CD key"` |
|
||||
| `--overwrite` | Replace existing key | `--overwrite` |
|
||||
| `--json` | Machine-readable output | `--json` |
|
||||
|
||||
### Available Roles
|
||||
|
||||
- `ADMIN` - Full system access
|
||||
- `CONNECT` - Unraid Connect features
|
||||
- `VIEWER` - Read-only access
|
||||
- `GUEST` - Limited access
|
||||
|
||||
### Available Resources and Actions
|
||||
|
||||
**Resources:** `ACTIVATION_CODE`, `API_KEY`, `ARRAY`, `CLOUD`, `CONFIG`, `CONNECT`, `CONNECT__REMOTE_ACCESS`, `CUSTOMIZATIONS`, `DASHBOARD`, `DISK`, `DISPLAY`, `DOCKER`, `FLASH`, `INFO`, `LOGS`, `ME`, `NETWORK`, `NOTIFICATIONS`, `ONLINE`, `OS`, `OWNER`, `PERMISSION`, `REGISTRATION`, `SERVERS`, `SERVICES`, `SHARE`, `VARS`, `VMS`, `WELCOME`
|
||||
|
||||
**Actions:** `CREATE_ANY`, `CREATE_OWN`, `READ_ANY`, `READ_OWN`, `UPDATE_ANY`, `UPDATE_OWN`, `DELETE_ANY`, `DELETE_OWN`
|
||||
|
||||
### Delete Command Options
|
||||
|
||||
| Flag | Description | Example |
|
||||
| --------------- | ------------------------ | ----------------- |
|
||||
| `--delete` | Enable delete mode | `--delete` |
|
||||
| `--name <name>` | Key to delete (optional) | `--name "my key"` |
|
||||
|
||||
**Note:** If `--name` is omitted, the command runs interactively.
|
||||
|
||||
## Best Practices
|
||||
|
||||
:::info[Security Best Practices]
|
||||
**Minimal Permissions**
|
||||
|
||||
- Use specific permissions instead of ADMIN role when possible
|
||||
- Example: `--permissions "DOCKER:READ_ANY"` instead of `--roles ADMIN`
|
||||
|
||||
**Key Lifecycle Management**
|
||||
|
||||
- Always clean up temporary keys after use
|
||||
- Store API keys securely (environment variables, secrets management)
|
||||
- Use descriptive names and descriptions for audit trails
|
||||
:::
|
||||
|
||||
### Error Handling
|
||||
|
||||
- Check exit codes (`$?`) after each command
|
||||
- Use `set -e` in bash scripts to fail fast
|
||||
- Implement proper cleanup with `trap`
|
||||
|
||||
### Key Naming
|
||||
|
||||
- Use descriptive names that include purpose and date
|
||||
- Names must contain only letters, numbers, and spaces
|
||||
- Unicode letters are supported
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
:::note[Common Error Messages]
|
||||
|
||||
**"API key name must contain only letters, numbers, and spaces"**
|
||||
|
||||
- **Solution:** Remove special characters like hyphens, underscores, or symbols
|
||||
|
||||
**"API key with name 'x' already exists"**
|
||||
|
||||
- **Solution:** Use `--overwrite` flag or choose a different name
|
||||
|
||||
**"Please add at least one role or permission to the key"**
|
||||
|
||||
- **Solution:** Specify either `--roles` or `--permissions` (or both)
|
||||
|
||||
:::
|
||||
|
||||
### Debug Mode
|
||||
|
||||
For troubleshooting, run with debug logging:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=debug unraid-api apikey --create --name "debug key" --roles ADMIN
|
||||
```
|
||||
@@ -13,7 +13,9 @@
|
||||
"watch": false,
|
||||
"interpreter": "/usr/local/bin/node",
|
||||
"ignore_watch": ["node_modules", "src", ".env.*", "myservers.cfg"],
|
||||
"log_file": "/var/log/graphql-api.log",
|
||||
"out_file": "/var/log/graphql-api.log",
|
||||
"error_file": "/var/log/graphql-api.log",
|
||||
"merge_logs": true,
|
||||
"kill_timeout": 10000
|
||||
}
|
||||
]
|
||||
|
||||
@@ -1361,6 +1361,12 @@ type CpuLoad {
|
||||
|
||||
"""The percentage of time the CPU spent servicing hardware interrupts."""
|
||||
percentIrq: Float!
|
||||
|
||||
"""The percentage of time the CPU spent running virtual machines (guest)."""
|
||||
percentGuest: Float!
|
||||
|
||||
"""The percentage of CPU time stolen by the hypervisor."""
|
||||
percentSteal: Float!
|
||||
}
|
||||
|
||||
type CpuUtilization implements Node {
|
||||
@@ -1798,6 +1804,8 @@ type Server implements Node {
|
||||
guid: String!
|
||||
apikey: String!
|
||||
name: String!
|
||||
|
||||
"""Whether this server is online or offline"""
|
||||
status: ServerStatus!
|
||||
wanip: String!
|
||||
lanip: String!
|
||||
@@ -1854,7 +1862,7 @@ type OidcProvider {
|
||||
"""
|
||||
OIDC issuer URL (e.g., https://accounts.google.com). Required for auto-discovery via /.well-known/openid-configuration
|
||||
"""
|
||||
issuer: String!
|
||||
issuer: String
|
||||
|
||||
"""
|
||||
OAuth2 authorization endpoint URL. If omitted, will be auto-discovered from issuer/.well-known/openid-configuration
|
||||
@@ -1907,6 +1915,16 @@ enum AuthorizationRuleMode {
|
||||
AND
|
||||
}
|
||||
|
||||
type OidcConfiguration {
|
||||
"""List of configured OIDC providers"""
|
||||
providers: [OidcProvider!]!
|
||||
|
||||
"""
|
||||
Default allowed redirect origins that apply to all OIDC providers (e.g., Tailscale domains)
|
||||
"""
|
||||
defaultAllowedOrigins: [String!]
|
||||
}
|
||||
|
||||
type OidcSessionValidation {
|
||||
valid: Boolean!
|
||||
username: String
|
||||
@@ -2307,8 +2325,6 @@ type Query {
|
||||
getApiKeyCreationFormSchema: ApiKeyFormSettings!
|
||||
config: Config!
|
||||
flash: Flash!
|
||||
logFiles: [LogFile!]!
|
||||
logFile(path: String!, lines: Int, startLine: Int): LogFileContent!
|
||||
me: UserAccount!
|
||||
|
||||
"""Get all notifications"""
|
||||
@@ -2335,6 +2351,8 @@ type Query {
|
||||
disk(id: PrefixedID!): Disk!
|
||||
rclone: RCloneBackupSettings!
|
||||
info: Info!
|
||||
logFiles: [LogFile!]!
|
||||
logFile(path: String!, lines: Int, startLine: Int): LogFileContent!
|
||||
settings: Settings!
|
||||
isSSOEnabled: Boolean!
|
||||
|
||||
@@ -2347,6 +2365,9 @@ type Query {
|
||||
"""Get a specific OIDC provider by ID"""
|
||||
oidcProvider(id: PrefixedID!): OidcProvider
|
||||
|
||||
"""Get the full OIDC configuration (admin only)"""
|
||||
oidcConfiguration: OidcConfiguration!
|
||||
|
||||
"""Validate an OIDC session token (internal use for CLI validation)"""
|
||||
validateOidcSession(token: String!): OidcSessionValidation!
|
||||
metrics: Metrics!
|
||||
@@ -2590,13 +2611,13 @@ input AccessUrlInput {
|
||||
}
|
||||
|
||||
type Subscription {
|
||||
logFile(path: String!): LogFileContent!
|
||||
notificationAdded: Notification!
|
||||
notificationsOverview: NotificationOverview!
|
||||
ownerSubscription: Owner!
|
||||
serversSubscription: Server!
|
||||
parityHistorySubscription: ParityCheck!
|
||||
arraySubscription: UnraidArray!
|
||||
logFile(path: String!): LogFileContent!
|
||||
systemMetricsCpu: CpuUtilization!
|
||||
systemMetricsMemory: MemoryUtilization!
|
||||
upsUpdates: UPSDevice!
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@unraid/api",
|
||||
"version": "4.15.1",
|
||||
"version": "4.20.1",
|
||||
"main": "src/cli/index.ts",
|
||||
"type": "module",
|
||||
"corepack": {
|
||||
@@ -10,7 +10,7 @@
|
||||
"author": "Lime Technology, Inc. <unraid.net>",
|
||||
"license": "GPL-2.0-or-later",
|
||||
"engines": {
|
||||
"pnpm": "10.14.0"
|
||||
"pnpm": "10.15.0"
|
||||
},
|
||||
"scripts": {
|
||||
"// Development": "",
|
||||
@@ -51,7 +51,7 @@
|
||||
"unraid-api": "dist/cli.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@apollo/client": "3.13.9",
|
||||
"@apollo/client": "3.14.0",
|
||||
"@apollo/server": "4.12.2",
|
||||
"@as-integrations/fastify": "2.1.1",
|
||||
"@fastify/cookie": "11.0.2",
|
||||
@@ -82,7 +82,7 @@
|
||||
"atomically": "2.0.3",
|
||||
"bycontract": "2.0.11",
|
||||
"bytes": "3.1.2",
|
||||
"cache-manager": "7.1.1",
|
||||
"cache-manager": "7.2.0",
|
||||
"cacheable-lookup": "7.0.0",
|
||||
"camelcase-keys": "9.1.3",
|
||||
"casbin": "5.38.0",
|
||||
@@ -99,6 +99,7 @@
|
||||
"diff": "8.0.2",
|
||||
"dockerode": "4.0.7",
|
||||
"dotenv": "17.2.1",
|
||||
"escape-html": "1.0.3",
|
||||
"execa": "9.6.0",
|
||||
"exit-hook": "4.0.0",
|
||||
"fastify": "5.5.0",
|
||||
@@ -113,24 +114,26 @@
|
||||
"graphql-subscriptions": "3.0.0",
|
||||
"graphql-tag": "2.12.6",
|
||||
"graphql-ws": "6.0.6",
|
||||
"i18next": "^25.5.2",
|
||||
"ini": "5.0.0",
|
||||
"ip": "2.0.1",
|
||||
"jose": "6.0.12",
|
||||
"jose": "6.0.13",
|
||||
"json-bigint-patch": "0.0.8",
|
||||
"lodash-es": "4.17.21",
|
||||
"multi-ini": "2.3.2",
|
||||
"mustache": "4.2.0",
|
||||
"nest-authz": "2.17.0",
|
||||
"nest-commander": "3.18.0",
|
||||
"nest-commander": "3.19.0",
|
||||
"nestjs-i18n": "^10.5.1",
|
||||
"nestjs-pino": "4.4.0",
|
||||
"node-cache": "5.1.2",
|
||||
"node-window-polyfill": "1.0.4",
|
||||
"openid-client": "6.6.2",
|
||||
"openid-client": "6.6.4",
|
||||
"p-retry": "6.2.1",
|
||||
"passport-custom": "1.1.1",
|
||||
"passport-http-header-strategy": "1.1.0",
|
||||
"path-type": "6.0.0",
|
||||
"pino": "9.8.0",
|
||||
"pino": "9.9.0",
|
||||
"pino-http": "10.5.0",
|
||||
"pino-pretty": "13.1.1",
|
||||
"pm2": "6.0.8",
|
||||
@@ -138,8 +141,8 @@
|
||||
"rxjs": "7.8.2",
|
||||
"semver": "7.7.2",
|
||||
"strftime": "0.10.3",
|
||||
"systeminformation": "5.27.7",
|
||||
"undici": "7.13.0",
|
||||
"systeminformation": "5.27.8",
|
||||
"undici": "7.15.0",
|
||||
"uuid": "11.1.0",
|
||||
"ws": "8.18.3",
|
||||
"zen-observable-ts": "1.1.0",
|
||||
@@ -154,7 +157,7 @@
|
||||
}
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "9.33.0",
|
||||
"@eslint/js": "9.34.0",
|
||||
"@graphql-codegen/add": "5.0.3",
|
||||
"@graphql-codegen/cli": "5.0.7",
|
||||
"@graphql-codegen/fragment-matcher": "5.1.0",
|
||||
@@ -164,17 +167,17 @@
|
||||
"@graphql-codegen/typescript-operations": "4.6.1",
|
||||
"@graphql-codegen/typescript-resolvers": "4.5.1",
|
||||
"@graphql-typed-document-node/core": "3.2.0",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.6.1",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.6.3",
|
||||
"@nestjs/testing": "11.1.6",
|
||||
"@originjs/vite-plugin-commonjs": "1.0.3",
|
||||
"@rollup/plugin-node-resolve": "16.0.1",
|
||||
"@swc/core": "1.13.3",
|
||||
"@swc/core": "1.13.5",
|
||||
"@types/async-exit-hook": "2.0.2",
|
||||
"@types/bytes": "3.1.5",
|
||||
"@types/cli-table": "0.3.4",
|
||||
"@types/command-exists": "1.2.3",
|
||||
"@types/cors": "2.8.19",
|
||||
"@types/dockerode": "3.3.42",
|
||||
"@types/dockerode": "3.3.43",
|
||||
"@types/graphql-fields": "1.3.9",
|
||||
"@types/graphql-type-uuid": "0.2.6",
|
||||
"@types/ini": "4.1.1",
|
||||
@@ -182,7 +185,7 @@
|
||||
"@types/lodash": "4.17.20",
|
||||
"@types/lodash-es": "4.17.12",
|
||||
"@types/mustache": "4.2.6",
|
||||
"@types/node": "22.17.1",
|
||||
"@types/node": "22.18.0",
|
||||
"@types/pify": "6.1.0",
|
||||
"@types/semver": "7.7.0",
|
||||
"@types/sendmail": "1.4.7",
|
||||
@@ -191,28 +194,28 @@
|
||||
"@types/supertest": "6.0.3",
|
||||
"@types/uuid": "10.0.0",
|
||||
"@types/ws": "8.18.1",
|
||||
"@types/wtfnode": "0.7.3",
|
||||
"@types/wtfnode": "0.10.0",
|
||||
"@vitest/coverage-v8": "3.2.4",
|
||||
"@vitest/ui": "3.2.4",
|
||||
"eslint": "9.33.0",
|
||||
"eslint": "9.34.0",
|
||||
"eslint-plugin-import": "2.32.0",
|
||||
"eslint-plugin-no-relative-import-paths": "1.6.1",
|
||||
"eslint-plugin-prettier": "5.5.4",
|
||||
"jiti": "2.5.1",
|
||||
"nodemon": "3.1.10",
|
||||
"prettier": "3.6.2",
|
||||
"rollup-plugin-node-externals": "8.0.1",
|
||||
"rollup-plugin-node-externals": "8.1.0",
|
||||
"supertest": "7.1.4",
|
||||
"tsx": "4.20.3",
|
||||
"tsx": "4.20.5",
|
||||
"type-fest": "4.41.0",
|
||||
"typescript": "5.9.2",
|
||||
"typescript-eslint": "8.39.1",
|
||||
"unplugin-swc": "1.5.5",
|
||||
"vite": "7.1.1",
|
||||
"typescript-eslint": "8.41.0",
|
||||
"unplugin-swc": "1.5.7",
|
||||
"vite": "7.1.3",
|
||||
"vite-plugin-node": "7.0.0",
|
||||
"vite-tsconfig-paths": "5.1.4",
|
||||
"vitest": "3.2.4",
|
||||
"zx": "8.8.0"
|
||||
"zx": "8.8.1"
|
||||
},
|
||||
"overrides": {
|
||||
"eslint": {
|
||||
@@ -227,5 +230,5 @@
|
||||
}
|
||||
},
|
||||
"private": true,
|
||||
"packageManager": "pnpm@10.14.0"
|
||||
"packageManager": "pnpm@10.15.0"
|
||||
}
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
import { expect, test } from 'vitest';
|
||||
import { expect, test, vi } from 'vitest';
|
||||
|
||||
import { store } from '@app/store/index.js';
|
||||
import { FileLoadStatus, StateFileKey } from '@app/store/types.js';
|
||||
|
||||
import '@app/core/utils/misc/get-key-file.js';
|
||||
import '@app/store/modules/emhttp.js';
|
||||
|
||||
vi.mock('fs/promises');
|
||||
|
||||
test('Before loading key returns null', async () => {
|
||||
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
|
||||
const { status } = store.getState().registration;
|
||||
@@ -48,21 +49,70 @@ test('Returns empty key if key location is empty', async () => {
|
||||
await expect(getKeyFile()).resolves.toBe('');
|
||||
});
|
||||
|
||||
test(
|
||||
'Returns decoded key file if key location exists',
|
||||
async () => {
|
||||
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
|
||||
const { loadStateFiles } = await import('@app/store/modules/emhttp.js');
|
||||
const { loadRegistrationKey } = await import('@app/store/modules/registration.js');
|
||||
// Load state files into store
|
||||
await store.dispatch(loadStateFiles());
|
||||
await store.dispatch(loadRegistrationKey());
|
||||
// Check if store has state files loaded
|
||||
const { status } = store.getState().registration;
|
||||
expect(status).toBe(FileLoadStatus.LOADED);
|
||||
await expect(getKeyFile()).resolves.toMatchInlineSnapshot(
|
||||
'"hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w"'
|
||||
);
|
||||
},
|
||||
{ timeout: 10000 }
|
||||
);
|
||||
test('Returns empty string when key file does not exist (ENOENT)', async () => {
|
||||
const { readFile } = await import('fs/promises');
|
||||
|
||||
// Mock readFile to throw ENOENT error
|
||||
const readFileMock = vi.mocked(readFile);
|
||||
readFileMock.mockRejectedValueOnce(
|
||||
Object.assign(new Error('ENOENT: no such file or directory'), { code: 'ENOENT' })
|
||||
);
|
||||
|
||||
// Clear the module cache and re-import to get fresh module with mock
|
||||
vi.resetModules();
|
||||
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
|
||||
const { updateEmhttpState } = await import('@app/store/modules/emhttp.js');
|
||||
const { store: freshStore } = await import('@app/store/index.js');
|
||||
|
||||
// Set key file location to a non-existent file
|
||||
freshStore.dispatch(
|
||||
updateEmhttpState({
|
||||
field: StateFileKey.var,
|
||||
state: {
|
||||
regFile: '/boot/config/Pro.key',
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
// Should return empty string when file doesn't exist
|
||||
await expect(getKeyFile()).resolves.toBe('');
|
||||
|
||||
// Clear mock
|
||||
readFileMock.mockReset();
|
||||
vi.resetModules();
|
||||
});
|
||||
|
||||
test('Returns decoded key file if key location exists', async () => {
|
||||
const { readFile } = await import('fs/promises');
|
||||
|
||||
// Mock a valid key file content
|
||||
const mockKeyContent =
|
||||
'hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w==';
|
||||
const binaryContent = Buffer.from(mockKeyContent, 'base64').toString('binary');
|
||||
|
||||
const readFileMock = vi.mocked(readFile);
|
||||
readFileMock.mockResolvedValue(binaryContent);
|
||||
|
||||
// Clear the module cache and re-import to get fresh module with mock
|
||||
vi.resetModules();
|
||||
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
|
||||
const { loadStateFiles } = await import('@app/store/modules/emhttp.js');
|
||||
const { loadRegistrationKey } = await import('@app/store/modules/registration.js');
|
||||
const { store: freshStore } = await import('@app/store/index.js');
|
||||
|
||||
// Load state files into store
|
||||
await freshStore.dispatch(loadStateFiles());
|
||||
await freshStore.dispatch(loadRegistrationKey());
|
||||
// Check if store has state files loaded
|
||||
const { status } = freshStore.getState().registration;
|
||||
expect(status).toBe(FileLoadStatus.LOADED);
|
||||
|
||||
const result = await getKeyFile();
|
||||
expect(result).toBe(
|
||||
'hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w'
|
||||
);
|
||||
|
||||
// Clear mock
|
||||
readFileMock.mockReset();
|
||||
vi.resetModules();
|
||||
}, 10000);
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import { existsSync } from 'node:fs';
|
||||
import { homedir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
import { execa } from 'execa';
|
||||
import pm2 from 'pm2';
|
||||
import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
import { afterAll, afterEach, beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
import { isUnraidApiRunning } from '@app/core/utils/pm2/unraid-api-running.js';
|
||||
|
||||
@@ -17,11 +18,6 @@ const TEST_PROCESS_NAME = 'test-unraid-api';
|
||||
// Shared PM2 connection state
|
||||
let pm2Connected = false;
|
||||
|
||||
// Helper function to run CLI command (assumes CLI is built)
|
||||
async function runCliCommand(command: string, options: any = {}) {
|
||||
return await execa('node', [CLI_PATH, command], options);
|
||||
}
|
||||
|
||||
// Helper to ensure PM2 connection is established
|
||||
async function ensurePM2Connection() {
|
||||
if (pm2Connected) return;
|
||||
@@ -57,7 +53,7 @@ async function deleteTestProcesses() {
|
||||
}
|
||||
|
||||
const processName = processNames[deletedCount];
|
||||
pm2.delete(processName, (deleteErr) => {
|
||||
pm2.delete(processName, () => {
|
||||
// Ignore errors, process might not exist
|
||||
deletedCount++;
|
||||
deleteNext();
|
||||
@@ -92,7 +88,7 @@ async function cleanupAllPM2Processes() {
|
||||
}
|
||||
|
||||
// Kill the daemon to ensure fresh state
|
||||
pm2.killDaemon((killErr) => {
|
||||
pm2.killDaemon(() => {
|
||||
pm2.disconnect();
|
||||
pm2Connected = false;
|
||||
// Small delay to let PM2 fully shutdown
|
||||
@@ -104,6 +100,9 @@ async function cleanupAllPM2Processes() {
|
||||
|
||||
describe.skipIf(!!process.env.CI)('PM2 integration tests', () => {
|
||||
beforeAll(async () => {
|
||||
// Set PM2_HOME to use home directory for testing (not /var/log)
|
||||
process.env.PM2_HOME = join(homedir(), '.pm2');
|
||||
|
||||
// Build the CLI if it doesn't exist (only for CLI tests)
|
||||
if (!existsSync(CLI_PATH)) {
|
||||
console.log('Building CLI for integration tests...');
|
||||
@@ -198,6 +197,13 @@ describe.skipIf(!!process.env.CI)('PM2 integration tests', () => {
|
||||
}, 30000);
|
||||
|
||||
it('should handle PM2 connection errors gracefully', async () => {
|
||||
// Disconnect PM2 first to ensure we're testing fresh connection
|
||||
await new Promise<void>((resolve) => {
|
||||
pm2.disconnect();
|
||||
pm2Connected = false;
|
||||
setTimeout(resolve, 100);
|
||||
});
|
||||
|
||||
// Set an invalid PM2_HOME to force connection failure
|
||||
const originalPM2Home = process.env.PM2_HOME;
|
||||
process.env.PM2_HOME = '/invalid/path/that/does/not/exist';
|
||||
|
||||
@@ -211,6 +211,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": null,
|
||||
"id": "ST18000NM000J-2TV103_ZR585CPY",
|
||||
"idx": 0,
|
||||
"isSpinning": true,
|
||||
"name": "parity",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -235,6 +236,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": 4116003021,
|
||||
"id": "ST18000NM000J-2TV103_ZR5B1W9X",
|
||||
"idx": 1,
|
||||
"isSpinning": true,
|
||||
"name": "disk1",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -259,6 +261,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": 11904860828,
|
||||
"id": "WDC_WD120EDAZ-11F3RA0_5PJRD45C",
|
||||
"idx": 2,
|
||||
"isSpinning": true,
|
||||
"name": "disk2",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -283,6 +286,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": 6478056481,
|
||||
"id": "WDC_WD120EMAZ-11BLFA0_5PH8BTYD",
|
||||
"idx": 3,
|
||||
"isSpinning": true,
|
||||
"name": "disk3",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -307,6 +311,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": 137273827,
|
||||
"id": "Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z",
|
||||
"idx": 30,
|
||||
"isSpinning": true,
|
||||
"name": "cache",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -331,6 +336,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": null,
|
||||
"id": "KINGSTON_SA2000M8250G_50026B7282669D9E",
|
||||
"idx": 31,
|
||||
"isSpinning": true,
|
||||
"name": "cache2",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -355,6 +361,7 @@ test('After init returns values from cfg file for all fields', { timeout: 30000
|
||||
"fsUsed": 851325,
|
||||
"id": "Cruzer",
|
||||
"idx": 32,
|
||||
"isSpinning": true,
|
||||
"name": "flash",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
|
||||
@@ -28,6 +28,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": null,
|
||||
"id": "ST18000NM000J-2TV103_ZR585CPY",
|
||||
"idx": 0,
|
||||
"isSpinning": true,
|
||||
"name": "parity",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -52,6 +53,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": 4116003021,
|
||||
"id": "ST18000NM000J-2TV103_ZR5B1W9X",
|
||||
"idx": 1,
|
||||
"isSpinning": true,
|
||||
"name": "disk1",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -76,6 +78,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": 11904860828,
|
||||
"id": "WDC_WD120EDAZ-11F3RA0_5PJRD45C",
|
||||
"idx": 2,
|
||||
"isSpinning": true,
|
||||
"name": "disk2",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -100,6 +103,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": 6478056481,
|
||||
"id": "WDC_WD120EMAZ-11BLFA0_5PH8BTYD",
|
||||
"idx": 3,
|
||||
"isSpinning": true,
|
||||
"name": "disk3",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -124,6 +128,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": 137273827,
|
||||
"id": "Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z",
|
||||
"idx": 30,
|
||||
"isSpinning": true,
|
||||
"name": "cache",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -148,6 +153,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": null,
|
||||
"id": "KINGSTON_SA2000M8250G_50026B7282669D9E",
|
||||
"idx": 31,
|
||||
"isSpinning": true,
|
||||
"name": "cache2",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
@@ -172,6 +178,7 @@ test('Returns parsed state file', async () => {
|
||||
"fsUsed": 851325,
|
||||
"id": "Cruzer",
|
||||
"idx": 32,
|
||||
"isSpinning": true,
|
||||
"name": "flash",
|
||||
"numErrors": 0,
|
||||
"numReads": 0,
|
||||
|
||||
@@ -29,8 +29,24 @@ const stream = SUPPRESS_LOGS
|
||||
singleLine: true,
|
||||
hideObject: false,
|
||||
colorize: true,
|
||||
colorizeObjects: true,
|
||||
levelFirst: false,
|
||||
ignore: 'hostname,pid',
|
||||
destination: logDestination,
|
||||
translateTime: 'HH:mm:ss',
|
||||
customPrettifiers: {
|
||||
time: (timestamp: string | object) => `[${timestamp}`,
|
||||
level: (logLevel: string | object, key: string, log: any, extras: any) => {
|
||||
// Use labelColorized which preserves the colors
|
||||
const { labelColorized } = extras;
|
||||
const context = log.context || log.logger || 'app';
|
||||
return `${labelColorized} ${context}]`;
|
||||
},
|
||||
},
|
||||
messageFormat: (log: any, messageKey: string) => {
|
||||
const msg = log[messageKey] || log.msg || '';
|
||||
return msg;
|
||||
},
|
||||
})
|
||||
: logDestination;
|
||||
|
||||
|
||||
@@ -13,10 +13,11 @@ export const pubsub = new PubSub({ eventEmitter });

/**
 * Create a pubsub subscription.
 * @param channel The pubsub channel to subscribe to.
 * @param channel The pubsub channel to subscribe to. Can be either a predefined GRAPHQL_PUBSUB_CHANNEL
 * or a dynamic string for runtime-generated topics (e.g., log file paths like "LOG_FILE:/var/log/test.log")
 */
export const createSubscription = <T = any>(
    channel: GRAPHQL_PUBSUB_CHANNEL
    channel: GRAPHQL_PUBSUB_CHANNEL | string
): AsyncIterableIterator<T> => {
    return pubsub.asyncIterableIterator<T>(channel);
};
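A short usage sketch of the widened signature, assuming a resolver that streams a log file over a runtime-generated topic; the import path and payload shape are illustrative, not taken from the project:

import { createSubscription, pubsub } from '@app/core/pubsub.js';

// Hypothetical dynamic topic built at runtime from a log file path.
const topic = 'LOG_FILE:/var/log/test.log';

// The returned async iterator can be handed straight back from a GraphQL subscription resolver.
const iterator = createSubscription<{ logFile: { content: string } }>(topic);

// Elsewhere, publishers push updates onto the same dynamic topic.
await pubsub.publish(topic, { logFile: { content: 'new line appended' } });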
||||
|
||||
@@ -16,11 +16,22 @@ export const getKeyFile = async function (appStore: RootState = store.getState()
|
||||
|
||||
const keyFileName = basename(emhttp.var?.regFile);
|
||||
const registrationKeyFilePath = join(paths['keyfile-base'], keyFileName);
|
||||
const keyFile = await readFile(registrationKeyFilePath, 'binary');
|
||||
return Buffer.from(keyFile, 'binary')
|
||||
.toString('base64')
|
||||
.trim()
|
||||
.replace(/\+/g, '-')
|
||||
.replace(/\//g, '_')
|
||||
.replace(/=/g, '');
|
||||
|
||||
try {
|
||||
const keyFile = await readFile(registrationKeyFilePath, 'binary');
|
||||
return Buffer.from(keyFile, 'binary')
|
||||
.toString('base64')
|
||||
.trim()
|
||||
.replace(/\+/g, '-')
|
||||
.replace(/\//g, '_')
|
||||
.replace(/=/g, '');
|
||||
} catch (error) {
|
||||
// Handle ENOENT error when Pro.key file doesn't exist
|
||||
if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
|
||||
// Return empty string when key file is missing (ENOKEYFILE state)
|
||||
return '';
|
||||
}
|
||||
// Re-throw other errors
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
// Non-function exports from this module are loaded into the NestJS Config at runtime.
|
||||
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { homedir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
@@ -99,7 +98,7 @@ export const MOTHERSHIP_GRAPHQL_LINK = process.env.MOTHERSHIP_GRAPHQL_LINK
|
||||
? 'https://staging.mothership.unraid.net/ws'
|
||||
: 'https://mothership.unraid.net/ws';
|
||||
|
||||
export const PM2_HOME = process.env.PM2_HOME ?? join(homedir(), '.pm2');
|
||||
export const PM2_HOME = process.env.PM2_HOME ?? '/var/log/.pm2';
|
||||
export const PM2_PATH = join(import.meta.dirname, '../../', 'node_modules', 'pm2', 'bin', 'pm2');
|
||||
export const ECOSYSTEM_PATH = join(import.meta.dirname, '../../', 'ecosystem.config.json');
|
||||
export const PATHS_LOGS_DIR =
|
||||
|
||||
29
api/src/i18n/en/common.json
Normal file
@@ -0,0 +1,29 @@
{
  "hello": "Hello",
  "welcome": "Welcome to Unraid API",
  "server": {
    "started": "Server started successfully",
    "stopped": "Server stopped",
    "error": "Server error occurred"
  },
  "auth": {
    "unauthorized": "Unauthorized access",
    "forbidden": "Access forbidden",
    "invalidToken": "Invalid authentication token",
    "tokenExpired": "Authentication token expired",
    "loginSuccess": "Login successful",
    "logoutSuccess": "Logout successful"
  },
  "docker": {
    "containerStarted": "Container {{name}} started",
    "containerStopped": "Container {{name}} stopped",
    "containerRemoved": "Container {{name}} removed",
    "imageDeleted": "Image {{name}} deleted"
  },
  "vm": {
    "started": "Virtual machine {{name}} started",
    "stopped": "Virtual machine {{name}} stopped",
    "paused": "Virtual machine {{name}} paused",
    "resumed": "Virtual machine {{name}} resumed"
  }
}
38
api/src/i18n/en/errors.json
Normal file
@@ -0,0 +1,38 @@
{
  "notFound": "Resource not found",
  "internalError": "Internal server error",
  "badRequest": "Bad request",
  "validation": {
    "required": "{{field}} is required",
    "invalid": "{{field}} is invalid",
    "minLength": "{{field}} must be at least {{min}} characters",
    "maxLength": "{{field}} must not exceed {{max}} characters",
    "email": "Invalid email format",
    "numeric": "{{field}} must be a number",
    "range": "{{field}} must be between {{min}} and {{max}}"
  },
  "docker": {
    "containerNotFound": "Container {{id}} not found",
    "imageNotFound": "Image {{id}} not found",
    "networkNotFound": "Network {{id}} not found",
    "volumeNotFound": "Volume {{id}} not found",
    "operationFailed": "Docker operation failed: {{error}}"
  },
  "vm": {
    "notFound": "Virtual machine {{name}} not found",
    "invalidState": "Invalid VM state for operation",
    "operationFailed": "VM operation failed: {{error}}"
  },
  "plugin": {
    "notFound": "Plugin {{name}} not found",
    "installFailed": "Failed to install plugin {{name}}",
    "uninstallFailed": "Failed to uninstall plugin {{name}}",
    "invalidManifest": "Invalid plugin manifest"
  },
  "file": {
    "notFound": "File not found: {{path}}",
    "accessDenied": "Access denied: {{path}}",
    "readError": "Failed to read file: {{path}}",
    "writeError": "Failed to write file: {{path}}"
  }
}
20
api/src/i18n/en/validation.json
Normal file
@@ -0,0 +1,20 @@
{
  "isNotEmpty": "{{property}} should not be empty",
  "isEmail": "{{property}} must be a valid email",
  "isString": "{{property}} must be a string",
  "isNumber": "{{property}} must be a number",
  "isBoolean": "{{property}} must be a boolean",
  "isArray": "{{property}} must be an array",
  "isObject": "{{property}} must be an object",
  "isEnum": "{{property}} must be one of: {{values}}",
  "minLength": "{{property}} must be at least {{min}} characters",
  "maxLength": "{{property}} must not exceed {{max}} characters",
  "min": "{{property}} must be at least {{min}}",
  "max": "{{property}} must not exceed {{max}}",
  "matches": "{{property}} format is invalid",
  "isUUID": "{{property}} must be a valid UUID",
  "isURL": "{{property}} must be a valid URL",
  "isIP": "{{property}} must be a valid IP address",
  "isPort": "{{property}} must be a valid port number",
  "isPath": "{{property}} must be a valid path"
}
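These keys line up with class-validator decorator names, which suggests they are meant to back nestjs-i18n's validation message helper. A hedged wiring sketch; the DTO and the min-length argument passing are assumptions, and runtime resolution also requires nestjs-i18n's validation pipe/filter to be registered:

import { IsEmail, IsNotEmpty, MinLength } from 'class-validator';
import { i18nValidationMessage } from 'nestjs-i18n';

// Hypothetical DTO showing how the validation.json keys could be referenced.
export class CreateUserDto {
    @IsNotEmpty({ message: i18nValidationMessage('validation.isNotEmpty') })
    @IsEmail({}, { message: i18nValidationMessage('validation.isEmail') })
    email!: string;

    @MinLength(8, { message: i18nValidationMessage('validation.minLength', { min: 8 }) })
    password!: string;
}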
28
api/src/i18n/i18n.module.ts
Normal file
@@ -0,0 +1,28 @@
import { Module } from '@nestjs/common';
import { ConfigModule, ConfigService } from '@nestjs/config';
import { HeaderResolver, I18nModule, QueryResolver, AcceptLanguageResolver } from 'nestjs-i18n';
import * as path from 'path';

@Module({
    imports: [
        I18nModule.forRootAsync({
            imports: [ConfigModule],
            inject: [ConfigService],
            useFactory: (configService: ConfigService) => ({
                fallbackLanguage: 'en',
                loaderOptions: {
                    path: path.join(__dirname),
                    watch: configService.get('NODE_ENV') === 'development',
                },
                resolvers: [
                    new QueryResolver(['lang', 'locale', 'l']),
                    new HeaderResolver(['x-locale', 'x-lang']),
                    new AcceptLanguageResolver(),
                ],
                typesOutputPath: path.join(__dirname, '../../src/generated/i18n.generated.ts'),
            }),
        }),
    ],
    exports: [I18nModule],
})
export class AppI18nModule {}
36
api/src/i18n/i18n.service.example.ts
Normal file
@@ -0,0 +1,36 @@
import { Injectable } from '@nestjs/common';
import { I18nService, I18nContext } from 'nestjs-i18n';

@Injectable()
export class ExampleI18nService {
    constructor(private readonly i18n: I18nService) {}

    // Basic translation
    getWelcomeMessage(lang?: string): string {
        return this.i18n.translate('common.welcome', { lang });
    }

    // Translation with interpolation
    getContainerStartedMessage(containerName: string, lang?: string): string {
        return this.i18n.translate('common.docker.containerStarted', {
            args: { name: containerName },
            lang,
        });
    }

    // Using context from request
    async getErrorMessage(errorKey: string): Promise<string> {
        const context = I18nContext.current();
        return this.i18n.translate(`errors.${errorKey}`, {
            lang: context?.lang,
        });
    }

    // Validation message with parameters
    getValidationMessage(field: string, min: number, max: number, lang?: string): string {
        return this.i18n.translate('errors.validation.range', {
            args: { field, min, max },
            lang,
        });
    }
}
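Given the resolvers registered in AppI18nModule, the requested language can come from a query parameter, a custom header, or Accept-Language. A client-side sketch; the REST path and host are assumptions, only the lang/x-locale wiring is taken from the module above:

// Language via query string: ?lang=, ?locale= or ?l= are all accepted by QueryResolver.
await fetch('http://tower.local/api/hello?lang=en');

// Language via custom header, handled by HeaderResolver.
await fetch('http://tower.local/api/hello', {
    headers: { 'x-locale': 'en' },
});

// Browsers fall back to Accept-Language via AcceptLanguageResolver.
await fetch('http://tower.local/api/hello', {
    headers: { 'Accept-Language': 'en-US,en;q=0.9' },
});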
@@ -36,6 +36,7 @@ export type IniSlot = {
|
||||
size: string;
|
||||
sizeSb: string;
|
||||
slots: string;
|
||||
spundown: string;
|
||||
status: SlotStatus;
|
||||
temp: string;
|
||||
type: SlotType;
|
||||
@@ -82,6 +83,7 @@ export const parse: StateFileToIniParserMap['disks'] = (disksIni) =>
|
||||
fsType: slot.fsType ?? null,
|
||||
format: slot.format === '-' ? null : slot.format,
|
||||
transport: slot.transport ?? null,
|
||||
isSpinning: slot.spundown ? slot.spundown === '0' : null,
|
||||
};
|
||||
// @TODO Zod Parse This
|
||||
return result;
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { CacheModule } from '@nestjs/cache-manager';
|
||||
import { Test } from '@nestjs/testing';
|
||||
|
||||
import { describe, expect, it } from 'vitest';
|
||||
@@ -9,7 +10,7 @@ describe('Module Dependencies Integration', () => {
|
||||
let module;
|
||||
try {
|
||||
module = await Test.createTestingModule({
|
||||
imports: [RestModule],
|
||||
imports: [CacheModule.register({ isGlobal: true }), RestModule],
|
||||
}).compile();
|
||||
|
||||
expect(module).toBeDefined();
|
||||
|
||||
@@ -9,6 +9,7 @@ import { LoggerModule } from 'nestjs-pino';
|
||||
|
||||
import { apiLogger } from '@app/core/log.js';
|
||||
import { LOG_LEVEL } from '@app/environment.js';
|
||||
import { AppI18nModule } from '@app/i18n/i18n.module.js';
|
||||
import { PubSubModule } from '@app/unraid-api/app/pubsub.module.js';
|
||||
import { AuthModule } from '@app/unraid-api/auth/auth.module.js';
|
||||
import { AuthenticationGuard } from '@app/unraid-api/auth/authentication.guard.js';
|
||||
@@ -23,6 +24,7 @@ import { UnraidFileModifierModule } from '@app/unraid-api/unraid-file-modifier/u
|
||||
imports: [
|
||||
GlobalDepsModule,
|
||||
LegacyConfigModule,
|
||||
AppI18nModule,
|
||||
PubSubModule,
|
||||
ScheduleModule.forRoot(),
|
||||
LoggerModule.forRoot({
|
||||
@@ -34,6 +36,15 @@ import { UnraidFileModifierModule } from '@app/unraid-api/unraid-file-modifier/u
|
||||
req: () => undefined,
|
||||
res: () => undefined,
|
||||
},
|
||||
formatters: {
|
||||
log: (obj) => {
|
||||
// Map NestJS context to Pino context field for pino-pretty
|
||||
if (obj.context && !obj.logger) {
|
||||
return { ...obj, logger: obj.context };
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
AuthModule,
|
||||
|
||||
@@ -681,4 +681,104 @@ describe('ApiKeyService', () => {
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('convertRolesStringArrayToRoles', () => {
|
||||
beforeEach(async () => {
|
||||
vi.mocked(getters.paths).mockReturnValue({
|
||||
'auth-keys': mockBasePath,
|
||||
} as ReturnType<typeof getters.paths>);
|
||||
|
||||
// Create a fresh mock logger for each test
|
||||
mockLogger = {
|
||||
log: vi.fn(),
|
||||
error: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
verbose: vi.fn(),
|
||||
};
|
||||
|
||||
apiKeyService = new ApiKeyService();
|
||||
// Replace the logger with our mock
|
||||
(apiKeyService as any).logger = mockLogger;
|
||||
});
|
||||
|
||||
it('should convert uppercase role strings to Role enum values', () => {
|
||||
const roles = ['ADMIN', 'CONNECT', 'VIEWER'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.VIEWER]);
|
||||
});
|
||||
|
||||
it('should convert lowercase role strings to Role enum values', () => {
|
||||
const roles = ['admin', 'connect', 'guest'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.GUEST]);
|
||||
});
|
||||
|
||||
it('should convert mixed case role strings to Role enum values', () => {
|
||||
const roles = ['Admin', 'CoNnEcT', 'ViEwEr'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.VIEWER]);
|
||||
});
|
||||
|
||||
it('should handle roles with whitespace', () => {
|
||||
const roles = [' ADMIN ', ' CONNECT ', 'VIEWER '];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.VIEWER]);
|
||||
});
|
||||
|
||||
it('should filter out invalid roles and warn', () => {
|
||||
const roles = ['ADMIN', 'INVALID_ROLE', 'VIEWER', 'ANOTHER_INVALID'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.VIEWER]);
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
'Ignoring invalid roles: INVALID_ROLE, ANOTHER_INVALID'
|
||||
);
|
||||
});
|
||||
|
||||
it('should return empty array when all roles are invalid', () => {
|
||||
const roles = ['INVALID1', 'INVALID2', 'INVALID3'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([]);
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
'Ignoring invalid roles: INVALID1, INVALID2, INVALID3'
|
||||
);
|
||||
});
|
||||
|
||||
it('should return empty array for empty input', () => {
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles([]);
|
||||
|
||||
expect(result).toEqual([]);
|
||||
expect(mockLogger.warn).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle all valid Role enum values', () => {
|
||||
const roles = Object.values(Role);
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual(Object.values(Role));
|
||||
expect(mockLogger.warn).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should deduplicate roles', () => {
|
||||
const roles = ['ADMIN', 'admin', 'ADMIN', 'VIEWER', 'viewer'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
// Note: Current implementation doesn't deduplicate, but this test documents the behavior
|
||||
expect(result).toEqual([Role.ADMIN, Role.ADMIN, Role.ADMIN, Role.VIEWER, Role.VIEWER]);
|
||||
});
|
||||
|
||||
it('should handle mixed valid and invalid roles correctly', () => {
|
||||
const roles = ['ADMIN', 'invalid', 'CONNECT', 'bad_role', 'GUEST', 'VIEWER'];
|
||||
const result = apiKeyService.convertRolesStringArrayToRoles(roles);
|
||||
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT, Role.GUEST, Role.VIEWER]);
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith('Ignoring invalid roles: invalid, bad_role');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -35,11 +35,29 @@ export class ApiKeyService implements OnModuleInit {
|
||||
|
||||
async onModuleInit() {
|
||||
this.memoryApiKeys = await this.loadAllFromDisk();
|
||||
await this.cleanupLegacyInternalKeys();
|
||||
if (environment.IS_MAIN_PROCESS) {
|
||||
this.setupWatch();
|
||||
}
|
||||
}
|
||||
|
||||
private async cleanupLegacyInternalKeys() {
|
||||
const legacyNames = ['CliInternal', 'ConnectInternal'];
|
||||
const keysToDelete = this.memoryApiKeys.filter((key) => legacyNames.includes(key.name));
|
||||
|
||||
if (keysToDelete.length > 0) {
|
||||
try {
|
||||
await this.deleteApiKeys(keysToDelete.map((key) => key.id));
|
||||
this.logger.log(`Cleaned up ${keysToDelete.length} legacy internal keys`);
|
||||
} catch (error) {
|
||||
this.logger.debug(
|
||||
error,
|
||||
`Failed to delete legacy internal keys: ${keysToDelete.map((key) => key.name).join(', ')}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async findAll(): Promise<ApiKey[]> {
|
||||
return this.memoryApiKeys;
|
||||
}
|
||||
@@ -92,9 +110,25 @@ export class ApiKeyService implements OnModuleInit {
|
||||
}
|
||||
|
||||
public convertRolesStringArrayToRoles(roles: string[]): Role[] {
|
||||
return roles
|
||||
.map((roleStr) => Role[roleStr.trim().toUpperCase() as keyof typeof Role])
|
||||
.filter(Boolean);
|
||||
const validRoles: Role[] = [];
|
||||
const invalidRoles: string[] = [];
|
||||
|
||||
for (const roleStr of roles) {
|
||||
const upperRole = roleStr.trim().toUpperCase();
|
||||
const role = Role[upperRole as keyof typeof Role];
|
||||
|
||||
if (role && ApiKeyService.validRoles.has(role)) {
|
||||
validRoles.push(role);
|
||||
} else {
|
||||
invalidRoles.push(roleStr);
|
||||
}
|
||||
}
|
||||
|
||||
if (invalidRoles.length > 0) {
|
||||
this.logger.warn(`Ignoring invalid roles: ${invalidRoles.join(', ')}`);
|
||||
}
|
||||
|
||||
return validRoles;
|
||||
}
|
||||
|
||||
async create({
|
||||
|
||||
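A quick usage sketch of the stricter role conversion, mirroring the behaviour the tests above assert; the service is instantiated directly here purely for illustration:

import { Role } from '@unraid/shared/graphql.model.js';

import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';

const apiKeyService = new ApiKeyService();

// Valid names are trimmed and upper-cased; unknown names are dropped with a single warning log.
apiKeyService.convertRolesStringArrayToRoles([' admin ', 'CONNECT', 'bogus']);
// => [Role.ADMIN, Role.CONNECT], plus "Ignoring invalid roles: bogus" in the service log.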
192
api/src/unraid-api/cli/__test__/api-key.command.test.ts
Normal file
@@ -0,0 +1,192 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import { InquirerService } from 'nest-commander';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
|
||||
import { AddApiKeyQuestionSet } from '@app/unraid-api/cli/apikey/add-api-key.questions.js';
|
||||
import { ApiKeyCommand } from '@app/unraid-api/cli/apikey/api-key.command.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
|
||||
describe('ApiKeyCommand', () => {
|
||||
let command: ApiKeyCommand;
|
||||
let apiKeyService: ApiKeyService;
|
||||
let logService: LogService;
|
||||
let inquirerService: InquirerService;
|
||||
let questionSet: AddApiKeyQuestionSet;
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
ApiKeyCommand,
|
||||
AddApiKeyQuestionSet,
|
||||
{
|
||||
provide: ApiKeyService,
|
||||
useValue: {
|
||||
findByField: vi.fn(),
|
||||
create: vi.fn(),
|
||||
findAll: vi.fn(),
|
||||
deleteApiKeys: vi.fn(),
|
||||
convertRolesStringArrayToRoles: vi.fn((roles) => roles),
|
||||
convertPermissionsStringArrayToPermissions: vi.fn((perms) => perms),
|
||||
getAllValidPermissions: vi.fn(() => []),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: LogService,
|
||||
useValue: {
|
||||
log: vi.fn(),
|
||||
error: vi.fn(),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: InquirerService,
|
||||
useValue: {
|
||||
prompt: vi.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
command = module.get<ApiKeyCommand>(ApiKeyCommand);
|
||||
apiKeyService = module.get<ApiKeyService>(ApiKeyService);
|
||||
logService = module.get<LogService>(LogService);
|
||||
inquirerService = module.get<InquirerService>(InquirerService);
|
||||
questionSet = module.get<AddApiKeyQuestionSet>(AddApiKeyQuestionSet);
|
||||
});
|
||||
|
||||
describe('AddApiKeyQuestionSet', () => {
|
||||
describe('shouldAskOverwrite', () => {
|
||||
it('should return true when an API key with the given name exists', () => {
|
||||
vi.mocked(apiKeyService.findByField).mockReturnValue({
|
||||
key: 'existing-key',
|
||||
name: 'test-key',
|
||||
description: 'Test key',
|
||||
roles: [],
|
||||
permissions: [],
|
||||
} as any);
|
||||
|
||||
const result = questionSet.shouldAskOverwrite({ name: 'test-key' });
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(apiKeyService.findByField).toHaveBeenCalledWith('name', 'test-key');
|
||||
});
|
||||
|
||||
it('should return false when no API key with the given name exists', () => {
|
||||
vi.mocked(apiKeyService.findByField).mockReturnValue(null);
|
||||
|
||||
const result = questionSet.shouldAskOverwrite({ name: 'non-existent-key' });
|
||||
|
||||
expect(result).toBe(false);
|
||||
expect(apiKeyService.findByField).toHaveBeenCalledWith('name', 'non-existent-key');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('run', () => {
|
||||
it('should find and return existing key when not creating', async () => {
|
||||
const mockKey = { key: 'test-api-key-123', name: 'test-key' };
|
||||
vi.mocked(apiKeyService.findByField).mockReturnValue(mockKey as any);
|
||||
|
||||
await command.run([], { name: 'test-key', create: false });
|
||||
|
||||
expect(apiKeyService.findByField).toHaveBeenCalledWith('name', 'test-key');
|
||||
expect(logService.log).toHaveBeenCalledWith('test-api-key-123');
|
||||
});
|
||||
|
||||
it('should create new key when key does not exist and create flag is set', async () => {
|
||||
vi.mocked(apiKeyService.findByField).mockReturnValue(null);
|
||||
vi.mocked(apiKeyService.create).mockResolvedValue({ key: 'new-api-key-456' } as any);
|
||||
|
||||
await command.run([], {
|
||||
name: 'new-key',
|
||||
create: true,
|
||||
roles: ['ADMIN'] as any,
|
||||
description: 'Test description',
|
||||
});
|
||||
|
||||
expect(apiKeyService.create).toHaveBeenCalledWith({
|
||||
name: 'new-key',
|
||||
description: 'Test description',
|
||||
roles: ['ADMIN'],
|
||||
permissions: undefined,
|
||||
overwrite: false,
|
||||
});
|
||||
expect(logService.log).toHaveBeenCalledWith('new-api-key-456');
|
||||
});
|
||||
|
||||
it('should error when key exists and overwrite is not set in non-interactive mode', async () => {
|
||||
const mockKey = { key: 'existing-key', name: 'test-key' };
|
||||
vi.mocked(apiKeyService.findByField)
|
||||
.mockReturnValueOnce(null) // First call in line 131
|
||||
.mockReturnValueOnce(mockKey as any); // Second call in non-interactive check
|
||||
const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => {
|
||||
throw new Error('process.exit');
|
||||
});
|
||||
|
||||
await expect(
|
||||
command.run([], {
|
||||
name: 'test-key',
|
||||
create: true,
|
||||
roles: ['ADMIN'] as any,
|
||||
})
|
||||
).rejects.toThrow();
|
||||
|
||||
expect(logService.error).toHaveBeenCalledWith(
|
||||
"API key with name 'test-key' already exists. Use --overwrite to replace it."
|
||||
);
|
||||
expect(exitSpy).toHaveBeenCalledWith(1);
|
||||
exitSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('should create key with overwrite when key exists and overwrite is set', async () => {
|
||||
const mockKey = { key: 'existing-key', name: 'test-key' };
|
||||
vi.mocked(apiKeyService.findByField)
|
||||
.mockReturnValueOnce(null) // First call in line 131
|
||||
.mockReturnValueOnce(mockKey as any); // Second call in non-interactive check
|
||||
vi.mocked(apiKeyService.create).mockResolvedValue({ key: 'overwritten-key' } as any);
|
||||
|
||||
await command.run([], {
|
||||
name: 'test-key',
|
||||
create: true,
|
||||
roles: ['ADMIN'] as any,
|
||||
overwrite: true,
|
||||
});
|
||||
|
||||
expect(apiKeyService.create).toHaveBeenCalledWith({
|
||||
name: 'test-key',
|
||||
description: 'CLI generated key: test-key',
|
||||
roles: ['ADMIN'],
|
||||
permissions: undefined,
|
||||
overwrite: true,
|
||||
});
|
||||
expect(logService.log).toHaveBeenCalledWith('overwritten-key');
|
||||
});
|
||||
|
||||
it('should prompt for missing fields when creating without sufficient info', async () => {
|
||||
vi.mocked(apiKeyService.findByField).mockReturnValue(null);
|
||||
vi.mocked(inquirerService.prompt).mockResolvedValue({
|
||||
name: 'prompted-key',
|
||||
roles: ['USER'],
|
||||
permissions: [],
|
||||
description: 'Prompted description',
|
||||
overwrite: false,
|
||||
} as any);
|
||||
vi.mocked(apiKeyService.create).mockResolvedValue({ key: 'prompted-api-key' } as any);
|
||||
|
||||
await command.run([], { name: '', create: true });
|
||||
|
||||
expect(inquirerService.prompt).toHaveBeenCalledWith('add-api-key', {
|
||||
name: '',
|
||||
create: true,
|
||||
});
|
||||
expect(apiKeyService.create).toHaveBeenCalledWith({
|
||||
name: 'prompted-key',
|
||||
description: 'Prompted description',
|
||||
roles: ['USER'],
|
||||
permissions: [],
|
||||
overwrite: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
111
api/src/unraid-api/cli/__test__/version.command.test.ts
Normal file
@@ -0,0 +1,111 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import { afterEach, beforeEach, describe, expect, it, MockInstance, vi } from 'vitest';
|
||||
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { VersionCommand } from '@app/unraid-api/cli/version.command.js';
|
||||
|
||||
let API_VERSION_MOCK = '4.18.2+build123';
|
||||
|
||||
vi.mock('@app/environment.js', async (importOriginal) => {
|
||||
const actual = (await importOriginal()) as any;
|
||||
return {
|
||||
...actual,
|
||||
get API_VERSION() {
|
||||
return API_VERSION_MOCK;
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
describe('VersionCommand', () => {
|
||||
let command: VersionCommand;
|
||||
let logService: LogService;
|
||||
let consoleLogSpy: MockInstance<typeof console.log>;
|
||||
|
||||
beforeEach(async () => {
|
||||
API_VERSION_MOCK = '4.18.2+build123'; // Reset to default before each test
|
||||
consoleLogSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
|
||||
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
VersionCommand,
|
||||
{
|
||||
provide: LogService,
|
||||
useValue: {
|
||||
info: vi.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
command = module.get<VersionCommand>(VersionCommand);
|
||||
logService = module.get<LogService>(LogService);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('run', () => {
|
||||
it('should output version with logger when no options provided', async () => {
|
||||
await command.run([]);
|
||||
|
||||
expect(logService.info).toHaveBeenCalledWith('Unraid API v4.18.2+build123');
|
||||
expect(consoleLogSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should output version with logger when json option is false', async () => {
|
||||
await command.run([], { json: false });
|
||||
|
||||
expect(logService.info).toHaveBeenCalledWith('Unraid API v4.18.2+build123');
|
||||
expect(consoleLogSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should output JSON when json option is true', async () => {
|
||||
await command.run([], { json: true });
|
||||
|
||||
expect(logService.info).not.toHaveBeenCalled();
|
||||
expect(consoleLogSpy).toHaveBeenCalledWith(
|
||||
JSON.stringify({
|
||||
version: '4.18.2',
|
||||
build: 'build123',
|
||||
combined: '4.18.2+build123',
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle version without build info', async () => {
|
||||
API_VERSION_MOCK = '4.18.2'; // Set version without build info
|
||||
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
VersionCommand,
|
||||
{
|
||||
provide: LogService,
|
||||
useValue: {
|
||||
info: vi.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
const commandWithoutBuild = module.get<VersionCommand>(VersionCommand);
|
||||
|
||||
await commandWithoutBuild.run([], { json: true });
|
||||
|
||||
expect(consoleLogSpy).toHaveBeenCalledWith(
|
||||
JSON.stringify({
|
||||
version: '4.18.2',
|
||||
build: undefined,
|
||||
combined: '4.18.2',
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseJson', () => {
|
||||
it('should return true', () => {
|
||||
expect(command.parseJson()).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -39,6 +39,12 @@ export class AddApiKeyQuestionSet {
|
||||
return this.apiKeyService.convertRolesStringArrayToRoles(val);
|
||||
}
|
||||
|
||||
@WhenFor({ name: 'roles' })
|
||||
shouldAskRoles(options: { roles?: Role[]; permissions?: Permission[] }): boolean {
|
||||
// Ask for roles if they weren't provided or are empty
|
||||
return !options.roles || options.roles.length === 0;
|
||||
}
|
||||
|
||||
@ChoicesFor({ name: 'roles' })
|
||||
async getRoles() {
|
||||
return Object.values(Role);
|
||||
@@ -53,6 +59,12 @@ export class AddApiKeyQuestionSet {
|
||||
return this.apiKeyService.convertPermissionsStringArrayToPermissions(val);
|
||||
}
|
||||
|
||||
@WhenFor({ name: 'permissions' })
|
||||
shouldAskPermissions(options: { roles?: Role[]; permissions?: Permission[] }): boolean {
|
||||
// Ask for permissions if they weren't provided or are empty
|
||||
return !options.permissions || options.permissions.length === 0;
|
||||
}
|
||||
|
||||
@ChoicesFor({ name: 'permissions' })
|
||||
async getPermissions() {
|
||||
return this.apiKeyService
|
||||
@@ -72,6 +84,6 @@ export class AddApiKeyQuestionSet {
|
||||
|
||||
@WhenFor({ name: 'overwrite' })
|
||||
shouldAskOverwrite(options: { name: string }): boolean {
|
||||
return Boolean(this.apiKeyService.findByKey(options.name));
|
||||
return Boolean(this.apiKeyService.findByField('name', options.name));
|
||||
}
|
||||
}
|
||||
|
||||
434
api/src/unraid-api/cli/apikey/api-key.command.spec.ts
Normal file
@@ -0,0 +1,434 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import { AuthAction, Resource, Role } from '@unraid/shared/graphql.model.js';
|
||||
import { InquirerService } from 'nest-commander';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';
|
||||
import { ApiKeyCommand } from '@app/unraid-api/cli/apikey/api-key.command.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
|
||||
describe('ApiKeyCommand', () => {
|
||||
let command: ApiKeyCommand;
|
||||
let apiKeyService: ApiKeyService;
|
||||
let logService: LogService;
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
ApiKeyCommand,
|
||||
{
|
||||
provide: ApiKeyService,
|
||||
useValue: {
|
||||
findByField: vi.fn(),
|
||||
create: vi.fn(),
|
||||
convertRolesStringArrayToRoles: vi.fn(),
|
||||
convertPermissionsStringArrayToPermissions: vi.fn(),
|
||||
findAll: vi.fn(),
|
||||
deleteApiKeys: vi.fn(),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: LogService,
|
||||
useValue: {
|
||||
log: vi.fn(),
|
||||
error: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: InquirerService,
|
||||
useValue: {
|
||||
prompt: vi.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
command = module.get<ApiKeyCommand>(ApiKeyCommand);
|
||||
apiKeyService = module.get<ApiKeyService>(ApiKeyService);
|
||||
logService = module.get<LogService>(LogService);
|
||||
});
|
||||
|
||||
describe('parseRoles', () => {
|
||||
it('should parse valid roles correctly', () => {
|
||||
const mockConvert = vi
|
||||
.spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
|
||||
.mockReturnValue([Role.ADMIN, Role.CONNECT]);
|
||||
|
||||
const result = command.parseRoles('ADMIN,CONNECT');
|
||||
|
||||
expect(mockConvert).toHaveBeenCalledWith(['ADMIN', 'CONNECT']);
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT]);
|
||||
});
|
||||
|
||||
it('should return GUEST role when no roles provided', () => {
|
||||
const result = command.parseRoles('');
|
||||
|
||||
expect(result).toEqual([Role.GUEST]);
|
||||
});
|
||||
|
||||
it('should handle roles with spaces', () => {
|
||||
const mockConvert = vi
|
||||
.spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
|
||||
.mockReturnValue([Role.ADMIN, Role.VIEWER]);
|
||||
|
||||
const result = command.parseRoles('ADMIN, VIEWER');
|
||||
|
||||
expect(mockConvert).toHaveBeenCalledWith(['ADMIN', ' VIEWER']);
|
||||
expect(result).toEqual([Role.ADMIN, Role.VIEWER]);
|
||||
});
|
||||
|
||||
it('should throw error when no valid roles found', () => {
|
||||
vi.spyOn(apiKeyService, 'convertRolesStringArrayToRoles').mockReturnValue([]);
|
||||
|
||||
expect(() => command.parseRoles('INVALID_ROLE')).toThrow(
|
||||
`Invalid roles. Valid options are: ${Object.values(Role).join(', ')}`
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle mixed valid and invalid roles with warning', () => {
|
||||
const mockConvert = vi
|
||||
.spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
|
||||
.mockImplementation((roles) => {
|
||||
const validRoles: Role[] = [];
|
||||
const invalidRoles: string[] = [];
|
||||
|
||||
for (const roleStr of roles) {
|
||||
const upperRole = roleStr.trim().toUpperCase();
|
||||
const role = Role[upperRole as keyof typeof Role];
|
||||
|
||||
if (role) {
|
||||
validRoles.push(role);
|
||||
} else {
|
||||
invalidRoles.push(roleStr);
|
||||
}
|
||||
}
|
||||
|
||||
if (invalidRoles.length > 0) {
|
||||
logService.warn(`Ignoring invalid roles: ${invalidRoles.join(', ')}`);
|
||||
}
|
||||
|
||||
return validRoles;
|
||||
});
|
||||
|
||||
const result = command.parseRoles('ADMIN,INVALID,VIEWER');
|
||||
|
||||
expect(mockConvert).toHaveBeenCalledWith(['ADMIN', 'INVALID', 'VIEWER']);
|
||||
expect(logService.warn).toHaveBeenCalledWith('Ignoring invalid roles: INVALID');
|
||||
expect(result).toEqual([Role.ADMIN, Role.VIEWER]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('run', () => {
|
||||
it('should create API key with roles without prompting', async () => {
|
||||
const mockKey = {
|
||||
id: 'test-id',
|
||||
key: 'test-key-123',
|
||||
name: 'TEST',
|
||||
roles: [Role.ADMIN],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
|
||||
vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'TEST',
|
||||
create: true,
|
||||
roles: [Role.ADMIN],
|
||||
permissions: undefined,
|
||||
description: 'Test description',
|
||||
});
|
||||
|
||||
expect(apiKeyService.create).toHaveBeenCalledWith({
|
||||
name: 'TEST',
|
||||
description: 'Test description',
|
||||
roles: [Role.ADMIN],
|
||||
permissions: undefined,
|
||||
overwrite: false,
|
||||
});
|
||||
expect(logService.log).toHaveBeenCalledWith('test-key-123');
|
||||
});
|
||||
|
||||
it('should create API key with permissions only without prompting', async () => {
|
||||
const mockKey = {
|
||||
id: 'test-id',
|
||||
key: 'test-key-456',
|
||||
name: 'TEST_PERMS',
|
||||
roles: [],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
const mockPermissions = [
|
||||
{
|
||||
resource: Resource.DOCKER,
|
||||
actions: [AuthAction.READ_ANY],
|
||||
},
|
||||
];
|
||||
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
|
||||
vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'TEST_PERMS',
|
||||
create: true,
|
||||
roles: undefined,
|
||||
permissions: mockPermissions,
|
||||
description: 'Test with permissions',
|
||||
});
|
||||
|
||||
expect(apiKeyService.create).toHaveBeenCalledWith({
|
||||
name: 'TEST_PERMS',
|
||||
description: 'Test with permissions',
|
||||
roles: undefined,
|
||||
permissions: mockPermissions,
|
||||
overwrite: false,
|
||||
});
|
||||
expect(logService.log).toHaveBeenCalledWith('test-key-456');
|
||||
});
|
||||
|
||||
it('should use default description when not provided', async () => {
|
||||
const mockKey = {
|
||||
id: 'test-id',
|
||||
key: 'test-key-789',
|
||||
name: 'NO_DESC',
|
||||
roles: [Role.VIEWER],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
|
||||
vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'NO_DESC',
|
||||
create: true,
|
||||
roles: [Role.VIEWER],
|
||||
permissions: undefined,
|
||||
});
|
||||
|
||||
expect(apiKeyService.create).toHaveBeenCalledWith({
|
||||
name: 'NO_DESC',
|
||||
description: 'CLI generated key: NO_DESC',
|
||||
roles: [Role.VIEWER],
|
||||
permissions: undefined,
|
||||
overwrite: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('should return existing key when found', async () => {
|
||||
const existingKey = {
|
||||
id: 'existing-id',
|
||||
key: 'existing-key-123',
|
||||
name: 'EXISTING',
|
||||
roles: [Role.ADMIN],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(existingKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'EXISTING',
|
||||
create: false,
|
||||
});
|
||||
|
||||
expect(apiKeyService.findByField).toHaveBeenCalledWith('name', 'EXISTING');
|
||||
expect(logService.log).toHaveBeenCalledWith('existing-key-123');
|
||||
expect(apiKeyService.create).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle uppercase role conversion', () => {
|
||||
const mockConvert = vi
|
||||
.spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
|
||||
.mockImplementation((roles) => {
|
||||
return roles
|
||||
.map((roleStr) => Role[roleStr.trim().toUpperCase() as keyof typeof Role])
|
||||
.filter(Boolean);
|
||||
});
|
||||
|
||||
const result = command.parseRoles('admin,connect');
|
||||
|
||||
expect(mockConvert).toHaveBeenCalledWith(['admin', 'connect']);
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT]);
|
||||
});
|
||||
|
||||
it('should handle lowercase role conversion', () => {
|
||||
const mockConvert = vi
|
||||
.spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
|
||||
.mockImplementation((roles) => {
|
||||
return roles
|
||||
.map((roleStr) => Role[roleStr.trim().toUpperCase() as keyof typeof Role])
|
||||
.filter(Boolean);
|
||||
});
|
||||
|
||||
const result = command.parseRoles('viewer');
|
||||
|
||||
expect(mockConvert).toHaveBeenCalledWith(['viewer']);
|
||||
expect(result).toEqual([Role.VIEWER]);
|
||||
});
|
||||
|
||||
it('should handle mixed case role conversion', () => {
|
||||
const mockConvert = vi
|
||||
.spyOn(apiKeyService, 'convertRolesStringArrayToRoles')
|
||||
.mockImplementation((roles) => {
|
||||
return roles
|
||||
.map((roleStr) => Role[roleStr.trim().toUpperCase() as keyof typeof Role])
|
||||
.filter(Boolean);
|
||||
});
|
||||
|
||||
const result = command.parseRoles('Admin,CoNnEcT');
|
||||
|
||||
expect(mockConvert).toHaveBeenCalledWith(['Admin', 'CoNnEcT']);
|
||||
expect(result).toEqual([Role.ADMIN, Role.CONNECT]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('JSON output functionality', () => {
|
||||
let consoleSpy: ReturnType<typeof vi.spyOn>;
|
||||
|
||||
beforeEach(() => {
|
||||
consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
|
||||
});
|
||||
|
||||
it('should output JSON when creating key with --json flag', async () => {
|
||||
const mockKey = {
|
||||
id: 'test-id-123',
|
||||
key: 'test-key-456',
|
||||
name: 'JSON_TEST',
|
||||
roles: [Role.ADMIN],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
|
||||
vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'JSON_TEST',
|
||||
create: true,
|
||||
roles: [Role.ADMIN],
|
||||
json: true,
|
||||
});
|
||||
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
JSON.stringify({ key: 'test-key-456', name: 'JSON_TEST', id: 'test-id-123' })
|
||||
);
|
||||
expect(logService.log).not.toHaveBeenCalledWith('test-key-456');
|
||||
});
|
||||
|
||||
it('should output JSON when fetching existing key with --json flag', async () => {
|
||||
const existingKey = {
|
||||
id: 'existing-id-456',
|
||||
key: 'existing-key-789',
|
||||
name: 'EXISTING_JSON',
|
||||
roles: [Role.VIEWER],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(existingKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'EXISTING_JSON',
|
||||
create: false,
|
||||
json: true,
|
||||
});
|
||||
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
JSON.stringify({ key: 'existing-key-789', name: 'EXISTING_JSON', id: 'existing-id-456' })
|
||||
);
|
||||
expect(logService.log).not.toHaveBeenCalledWith('existing-key-789');
|
||||
});
|
||||
|
||||
it('should output JSON when deleting key with --json flag', async () => {
|
||||
const existingKeys = [
|
||||
{
|
||||
id: 'delete-id-123',
|
||||
name: 'DELETE_JSON',
|
||||
key: 'delete-key-456',
|
||||
roles: [Role.GUEST],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
},
|
||||
];
|
||||
vi.spyOn(apiKeyService, 'findAll').mockResolvedValue(existingKeys);
|
||||
vi.spyOn(apiKeyService, 'deleteApiKeys').mockResolvedValue();
|
||||
|
||||
await command.run([], {
|
||||
name: 'DELETE_JSON',
|
||||
delete: true,
|
||||
json: true,
|
||||
});
|
||||
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
JSON.stringify({
|
||||
deleted: 1,
|
||||
keys: [{ id: 'delete-id-123', name: 'DELETE_JSON' }],
|
||||
})
|
||||
);
|
||||
expect(logService.log).not.toHaveBeenCalledWith('Successfully deleted 1 API key');
|
||||
});
|
||||
|
||||
it('should output JSON error when deleting non-existent key with --json flag', async () => {
|
||||
vi.spyOn(apiKeyService, 'findAll').mockResolvedValue([]);
|
||||
|
||||
await command.run([], {
|
||||
name: 'NONEXISTENT',
|
||||
delete: true,
|
||||
json: true,
|
||||
});
|
||||
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
JSON.stringify({ deleted: 0, message: 'No API keys found to delete' })
|
||||
);
|
||||
expect(logService.log).not.toHaveBeenCalledWith('No API keys found to delete');
|
||||
});
|
||||
|
||||
it('should not suppress creation message when not using JSON', async () => {
|
||||
const mockKey = {
|
||||
id: 'test-id',
|
||||
key: 'test-key',
|
||||
name: 'NO_JSON_TEST',
|
||||
roles: [Role.ADMIN],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
|
||||
vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'NO_JSON_TEST',
|
||||
create: true,
|
||||
roles: [Role.ADMIN],
|
||||
json: false,
|
||||
});
|
||||
|
||||
expect(logService.log).toHaveBeenCalledWith('Creating API Key...');
|
||||
expect(logService.log).toHaveBeenCalledWith('test-key');
|
||||
expect(consoleSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should suppress creation message when using JSON', async () => {
|
||||
const mockKey = {
|
||||
id: 'test-id',
|
||||
key: 'test-key',
|
||||
name: 'JSON_SUPPRESS_TEST',
|
||||
roles: [Role.ADMIN],
|
||||
createdAt: new Date().toISOString(),
|
||||
permissions: [],
|
||||
};
|
||||
vi.spyOn(apiKeyService, 'findByField').mockReturnValue(null);
|
||||
vi.spyOn(apiKeyService, 'create').mockResolvedValue(mockKey);
|
||||
|
||||
await command.run([], {
|
||||
name: 'JSON_SUPPRESS_TEST',
|
||||
create: true,
|
||||
roles: [Role.ADMIN],
|
||||
json: true,
|
||||
});
|
||||
|
||||
expect(logService.log).not.toHaveBeenCalledWith('Creating API Key...');
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
JSON.stringify({ key: 'test-key', name: 'JSON_SUPPRESS_TEST', id: 'test-id' })
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -10,11 +10,13 @@ import { Permission } from '@app/unraid-api/graph/resolvers/api-key/api-key.mode

interface KeyOptions {
    name: string;
    create: boolean;
    create?: boolean;
    delete?: boolean;
    description?: string;
    roles?: Role[];
    permissions?: Permission[];
    overwrite?: boolean;
    json?: boolean;
}

@Command({
@@ -52,22 +54,15 @@ export class ApiKeyCommand extends CommandRunner {
    })
    parseRoles(roles: string): Role[] {
        if (!roles) return [Role.GUEST];
        const validRoles: Set<Role> = new Set(Object.values(Role));

        const requestedRoles = roles.split(',').map((role) => role.trim().toLocaleLowerCase() as Role);
        const validRequestedRoles = requestedRoles.filter((role) => validRoles.has(role));
        const roleArray = roles.split(',').filter(Boolean);
        const validRoles = this.apiKeyService.convertRolesStringArrayToRoles(roleArray);

        if (validRequestedRoles.length === 0) {
            throw new Error(`Invalid roles. Valid options are: ${Array.from(validRoles).join(', ')}`);
        if (validRoles.length === 0) {
            throw new Error(`Invalid roles. Valid options are: ${Object.values(Role).join(', ')}`);
        }

        const invalidRoles = requestedRoles.filter((role) => !validRoles.has(role));

        if (invalidRoles.length > 0) {
            this.logger.warn(`Ignoring invalid roles: ${invalidRoles.join(', ')}`);
        }

        return validRequestedRoles;
        return validRoles;
    }
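For reference, a standalone sketch (not the project's actual ApiKeyService implementation) of the normalize-and-filter behavior that parseRoles now delegates to convertRolesStringArrayToRoles, matching the spec expectations earlier in this diff; the Role values shown are a subset for illustration.

// Sketch only: mirrors the mock implementation used in the spec above.
enum Role {
    ADMIN = 'ADMIN',
    CONNECT = 'CONNECT',
    VIEWER = 'VIEWER',
    GUEST = 'GUEST',
}

function convertRolesStringArrayToRoles(roles: string[]): Role[] {
    return roles
        .map((roleStr) => Role[roleStr.trim().toUpperCase() as keyof typeof Role])
        .filter(Boolean);
}

// 'Admin,CoNnEcT'.split(',') -> [Role.ADMIN, Role.CONNECT]; unknown names are dropped,
// and parseRoles() above throws when nothing valid remains.
console.log(convertRolesStringArrayToRoles('Admin,CoNnEcT'.split(',')));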
|
||||
|
||||
@Option({
|
||||
@@ -98,48 +93,137 @@ ACTIONS: ${Object.values(AuthAction).join(', ')}`,
|
||||
return true;
|
||||
}
|
||||
|
||||
/** Prompt the user to select API keys to delete. Then, delete the selected keys. */
|
||||
private async deleteKeys() {
|
||||
@Option({
|
||||
flags: '--overwrite',
|
||||
description: 'Overwrite existing API key if it exists',
|
||||
})
|
||||
parseOverwrite(): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Option({
|
||||
flags: '--json',
|
||||
description: 'Output machine-readable JSON format',
|
||||
})
|
||||
parseJson(): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
/** Helper to output either JSON or regular log message */
|
||||
private output(message: string, jsonData?: object, jsonOutput?: boolean): void {
|
||||
if (jsonOutput && jsonData) {
|
||||
console.log(JSON.stringify(jsonData));
|
||||
} else {
|
||||
this.logger.log(message);
|
||||
}
|
||||
}
|
||||
|
||||
/** Helper to output either JSON or regular error message */
|
||||
private outputError(message: string, jsonData?: object, jsonOutput?: boolean): void {
|
||||
if (jsonOutput && jsonData) {
|
||||
console.log(JSON.stringify(jsonData));
|
||||
} else {
|
||||
this.logger.error(message);
|
||||
}
|
||||
}
|
||||
|
||||
/** Delete API keys either by name (non-interactive) or by prompting user selection (interactive). */
|
||||
private async deleteKeys(name?: string, jsonOutput?: boolean) {
|
||||
const allKeys = await this.apiKeyService.findAll();
|
||||
if (allKeys.length === 0) {
|
||||
this.logger.log('No API keys found to delete');
|
||||
this.output(
|
||||
'No API keys found to delete',
|
||||
{ deleted: 0, message: 'No API keys found to delete' },
|
||||
jsonOutput
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const answers = await this.inquirerService.prompt<DeleteApiKeyAnswers>(
|
||||
DeleteApiKeyQuestionSet.name,
|
||||
{}
|
||||
);
|
||||
if (!answers.selectedKeys || answers.selectedKeys.length === 0) {
|
||||
this.logger.log('No keys selected for deletion');
|
||||
return;
|
||||
let selectedKeyIds: string[];
|
||||
let deletedKeys: { id: string; name: string }[] = [];
|
||||
|
||||
if (name) {
|
||||
// Non-interactive mode: delete by name
|
||||
const keyToDelete = allKeys.find((key) => key.name === name);
|
||||
if (!keyToDelete) {
|
||||
this.outputError(
|
||||
`No API key found with name: ${name}`,
|
||||
{ deleted: 0, error: `No API key found with name: ${name}` },
|
||||
jsonOutput
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
selectedKeyIds = [keyToDelete.id];
|
||||
deletedKeys = [{ id: keyToDelete.id, name: keyToDelete.name }];
|
||||
} else {
|
||||
// Interactive mode: prompt user to select keys
|
||||
const answers = await this.inquirerService.prompt<DeleteApiKeyAnswers>(
|
||||
DeleteApiKeyQuestionSet.name,
|
||||
{}
|
||||
);
|
||||
if (!answers.selectedKeys || answers.selectedKeys.length === 0) {
|
||||
this.output(
|
||||
'No keys selected for deletion',
|
||||
{ deleted: 0, message: 'No keys selected for deletion' },
|
||||
jsonOutput
|
||||
);
|
||||
return;
|
||||
}
|
||||
selectedKeyIds = answers.selectedKeys;
|
||||
deletedKeys = allKeys
|
||||
.filter((key) => selectedKeyIds.includes(key.id))
|
||||
.map((key) => ({ id: key.id, name: key.name }));
|
||||
}
|
||||
|
||||
try {
|
||||
await this.apiKeyService.deleteApiKeys(answers.selectedKeys);
|
||||
this.logger.log(`Successfully deleted ${answers.selectedKeys.length} API keys`);
|
||||
await this.apiKeyService.deleteApiKeys(selectedKeyIds);
|
||||
const message = `Successfully deleted ${selectedKeyIds.length} API key${selectedKeyIds.length === 1 ? '' : 's'}`;
|
||||
this.output(message, { deleted: selectedKeyIds.length, keys: deletedKeys }, jsonOutput);
|
||||
} catch (error) {
|
||||
this.logger.error(error as any);
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
this.outputError(errorMessage, { deleted: 0, error: errorMessage }, jsonOutput);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
async run(
|
||||
_: string[],
|
||||
options: KeyOptions = { create: false, name: '', delete: false }
|
||||
): Promise<void> {
|
||||
async run(_: string[], options: KeyOptions = { name: '', delete: false }): Promise<void> {
|
||||
try {
|
||||
if (options.delete) {
|
||||
await this.deleteKeys();
|
||||
await this.deleteKeys(options.name, options.json);
|
||||
return;
|
||||
}
|
||||
|
||||
const key = this.apiKeyService.findByField('name', options.name);
|
||||
if (key) {
|
||||
this.logger.log(key.key);
|
||||
} else if (options.create) {
|
||||
options = await this.inquirerService.prompt(AddApiKeyQuestionSet.name, options);
|
||||
this.logger.log('Creating API Key...' + JSON.stringify(options));
|
||||
this.output(key.key, { key: key.key, name: key.name, id: key.id }, options.json);
|
||||
} else if (options.create === true) {
|
||||
// Check if we have minimum required info from flags (name + at least one role or permission)
|
||||
const hasMinimumInfo =
|
||||
options.name &&
|
||||
((options.roles && options.roles.length > 0) ||
|
||||
(options.permissions && options.permissions.length > 0));
|
||||
|
||||
if (!hasMinimumInfo) {
|
||||
// Interactive mode - prompt for missing fields
|
||||
options = await this.inquirerService.prompt(AddApiKeyQuestionSet.name, options);
|
||||
} else {
|
||||
// Non-interactive mode - check if key exists and handle overwrite
|
||||
const existingKey = this.apiKeyService.findByField('name', options.name);
|
||||
if (existingKey && !options.overwrite) {
|
||||
this.outputError(
|
||||
`API key with name '${options.name}' already exists. Use --overwrite to replace it.`,
|
||||
{
|
||||
error: `API key with name '${options.name}' already exists. Use --overwrite to replace it.`,
|
||||
},
|
||||
options.json
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
if (!options.json) {
|
||||
this.logger.log('Creating API Key...');
|
||||
}
|
||||
|
||||
if (!options.roles && !options.permissions) {
|
||||
this.logger.error('Please add at least one role or permission to the key.');
|
||||
@@ -154,10 +238,10 @@ ACTIONS: ${Object.values(AuthAction).join(', ')}`,
|
||||
description: options.description || `CLI generated key: ${options.name}`,
|
||||
roles: options.roles,
|
||||
permissions: options.permissions,
|
||||
overwrite: true,
|
||||
overwrite: options.overwrite ?? false,
|
||||
});
|
||||
|
||||
this.logger.log(key.key);
|
||||
this.output(key.key, { key: key.key, name: key.name, id: key.id }, options.json);
|
||||
} else {
|
||||
this.logger.log('No Key Found');
|
||||
process.exit(1);
|
||||
|
||||
@@ -539,12 +539,16 @@ export type CoreVersions = {
|
||||
/** CPU load for a single core */
|
||||
export type CpuLoad = {
|
||||
__typename?: 'CpuLoad';
|
||||
/** The percentage of time the CPU spent running virtual machines (guest). */
|
||||
percentGuest: Scalars['Float']['output'];
|
||||
/** The percentage of time the CPU was idle. */
|
||||
percentIdle: Scalars['Float']['output'];
|
||||
/** The percentage of time the CPU spent servicing hardware interrupts. */
|
||||
percentIrq: Scalars['Float']['output'];
|
||||
/** The percentage of time the CPU spent on low-priority (niced) user space processes. */
|
||||
percentNice: Scalars['Float']['output'];
|
||||
/** The percentage of CPU time stolen by the hypervisor. */
|
||||
percentSteal: Scalars['Float']['output'];
|
||||
/** The percentage of time the CPU spent in kernel space. */
|
||||
percentSystem: Scalars['Float']['output'];
|
||||
/** The total CPU load on a single core, in percent. */
|
||||
@@ -1432,6 +1436,14 @@ export type OidcAuthorizationRule = {
|
||||
value: Array<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type OidcConfiguration = {
|
||||
__typename?: 'OidcConfiguration';
|
||||
/** Default allowed redirect origins that apply to all OIDC providers (e.g., Tailscale domains) */
|
||||
defaultAllowedOrigins?: Maybe<Array<Scalars['String']['output']>>;
|
||||
/** List of configured OIDC providers */
|
||||
providers: Array<OidcProvider>;
|
||||
};
|
||||
|
||||
export type OidcProvider = {
|
||||
__typename?: 'OidcProvider';
|
||||
/** OAuth2 authorization endpoint URL. If omitted, will be auto-discovered from issuer/.well-known/openid-configuration */
|
||||
@@ -1455,7 +1467,7 @@ export type OidcProvider = {
|
||||
/** The unique identifier for the OIDC provider */
|
||||
id: Scalars['PrefixedID']['output'];
|
||||
/** OIDC issuer URL (e.g., https://accounts.google.com). Required for auto-discovery via /.well-known/openid-configuration */
|
||||
issuer: Scalars['String']['output'];
|
||||
issuer?: Maybe<Scalars['String']['output']>;
|
||||
/** JSON Web Key Set URI for token validation. If omitted, will be auto-discovered from issuer/.well-known/openid-configuration */
|
||||
jwksUri?: Maybe<Scalars['String']['output']>;
|
||||
/** Display name of the OIDC provider */
|
||||
@@ -1654,6 +1666,8 @@ export type Query = {
|
||||
network: Network;
|
||||
/** Get all notifications */
|
||||
notifications: Notifications;
|
||||
/** Get the full OIDC configuration (admin only) */
|
||||
oidcConfiguration: OidcConfiguration;
|
||||
/** Get a specific OIDC provider by ID */
|
||||
oidcProvider?: Maybe<OidcProvider>;
|
||||
/** Get all configured OIDC providers (admin only) */
|
||||
@@ -1933,6 +1947,7 @@ export type Server = Node & {
|
||||
name: Scalars['String']['output'];
|
||||
owner: ProfileModel;
|
||||
remoteurl: Scalars['String']['output'];
|
||||
/** Whether this server is online or offline */
|
||||
status: ServerStatus;
|
||||
wanip: Scalars['String']['output'];
|
||||
};
|
||||
|
||||
76 api/src/unraid-api/cli/pm2.service.spec.ts Normal file
@@ -0,0 +1,76 @@
|
||||
import * as fs from 'node:fs/promises';
|
||||
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';
|
||||
|
||||
vi.mock('node:fs/promises');
|
||||
vi.mock('execa');
|
||||
vi.mock('@app/core/utils/files/file-exists.js', () => ({
|
||||
fileExists: vi.fn().mockResolvedValue(false),
|
||||
}));
|
||||
vi.mock('@app/environment.js', () => ({
|
||||
PATHS_LOGS_DIR: '/var/log/unraid-api',
|
||||
PM2_HOME: '/var/log/.pm2',
|
||||
PM2_PATH: '/path/to/pm2',
|
||||
ECOSYSTEM_PATH: '/path/to/ecosystem.config.json',
|
||||
SUPPRESS_LOGS: false,
|
||||
LOG_LEVEL: 'info',
|
||||
}));
|
||||
|
||||
describe('PM2Service', () => {
|
||||
let pm2Service: PM2Service;
|
||||
let logService: LogService;
|
||||
const mockMkdir = vi.mocked(fs.mkdir);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
logService = {
|
||||
trace: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
log: vi.fn(),
|
||||
info: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
} as unknown as LogService;
|
||||
pm2Service = new PM2Service(logService);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('ensurePm2Dependencies', () => {
|
||||
it('should create logs directory and log that PM2 will handle its own directory', async () => {
|
||||
mockMkdir.mockResolvedValue(undefined);
|
||||
|
||||
await pm2Service.ensurePm2Dependencies();
|
||||
|
||||
expect(mockMkdir).toHaveBeenCalledWith('/var/log/unraid-api', { recursive: true });
|
||||
expect(mockMkdir).toHaveBeenCalledTimes(1); // Only logs directory, not PM2_HOME
|
||||
expect(logService.trace).toHaveBeenCalledWith(
|
||||
'PM2_HOME will be created at /var/log/.pm2 when PM2 daemon starts'
|
||||
);
|
||||
});
|
||||
|
||||
it('should log error but not throw when logs directory creation fails', async () => {
|
||||
mockMkdir.mockRejectedValue(new Error('Disk full'));
|
||||
|
||||
await expect(pm2Service.ensurePm2Dependencies()).resolves.not.toThrow();
|
||||
|
||||
expect(logService.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Failed to fully ensure PM2 dependencies: Disk full')
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle mkdir with recursive flag for nested logs path', async () => {
|
||||
mockMkdir.mockResolvedValue(undefined);
|
||||
|
||||
await pm2Service.ensurePm2Dependencies();
|
||||
|
||||
expect(mockMkdir).toHaveBeenCalledWith('/var/log/unraid-api', { recursive: true });
|
||||
expect(mockMkdir).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -42,8 +42,22 @@ export class PM2Service {

    async run(context: CmdContext, ...args: string[]) {
        const { tag, raw, ...execOptions } = context;
        execOptions.extendEnv ??= false;
        // Default to true to match execa's default behavior
        execOptions.extendEnv ??= true;
        execOptions.shell ??= 'bash';

        // Ensure /usr/local/bin is in PATH for Node.js
        const currentPath = execOptions.env?.PATH || process.env.PATH || '/usr/bin:/bin:/usr/sbin:/sbin';
        const needsPathUpdate = !currentPath.includes('/usr/local/bin');
        const finalPath = needsPathUpdate ? `/usr/local/bin:${currentPath}` : currentPath;

        // Always ensure PM2_HOME is set in the environment for every PM2 command
        execOptions.env = {
            ...execOptions.env,
            PM2_HOME,
            ...(needsPathUpdate && { PATH: finalPath }),
        };

        const runCommand = () => execa(PM2_PATH, [...args], execOptions satisfies Options);
        if (raw) {
            return runCommand();
@@ -100,8 +114,20 @@ export class PM2Service {

    /**
     * Ensures that the dependencies necessary for PM2 to start and operate are present.
     * Creates PM2_HOME directory with proper permissions if it doesn't exist.
     */
    async ensurePm2Dependencies() {
        await mkdir(PATHS_LOGS_DIR, { recursive: true });
        try {
            // Create logs directory
            await mkdir(PATHS_LOGS_DIR, { recursive: true });

            // PM2 automatically creates and manages its home directory when the daemon starts
            this.logger.trace(`PM2_HOME will be created at ${PM2_HOME} when PM2 daemon starts`);
        } catch (error) {
            // Log error but don't throw - let PM2 fail with its own error messages if the setup is incomplete
            this.logger.error(
                `Failed to fully ensure PM2 dependencies: ${error instanceof Error ? error.message : error}. PM2 may encounter issues during operation.`
            );
        }
    }
}
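For clarity, the environment handling that run() applies above can be read as the following standalone sketch; PM2_HOME and the PATH fallbacks are the same values used in the diff, while the helper name itself is illustrative.

// Illustrative helper equivalent to the env handling in run() above.
function buildPm2Env(baseEnv: NodeJS.ProcessEnv | undefined, pm2Home: string): NodeJS.ProcessEnv {
    const currentPath = baseEnv?.PATH || process.env.PATH || '/usr/bin:/bin:/usr/sbin:/sbin';
    const needsPathUpdate = !currentPath.includes('/usr/local/bin');
    return {
        ...baseEnv,
        PM2_HOME: pm2Home,
        // Prepend /usr/local/bin only when it is missing so the expected Node.js binary resolves.
        ...(needsPathUpdate && { PATH: `/usr/local/bin:${currentPath}` }),
    };
}

Because extendEnv now defaults to true, this merged object is layered on top of the parent process environment rather than replacing it.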
|
||||
|
||||
@@ -1,9 +1,23 @@
import { Command, CommandRunner } from 'nest-commander';
import { Command, CommandRunner, Option } from 'nest-commander';

import { ECOSYSTEM_PATH } from '@app/environment.js';
import type { LogLevel } from '@app/core/log.js';
import { levels } from '@app/core/log.js';
import { ECOSYSTEM_PATH, LOG_LEVEL } from '@app/environment.js';
import { LogService } from '@app/unraid-api/cli/log.service.js';
import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';

export interface LogLevelOptions {
    logLevel?: LogLevel;
}

export function parseLogLevelOption(val: string, allowedLevels: string[] = [...levels]): LogLevel {
    const normalized = val.toLowerCase() as LogLevel;
    if (allowedLevels.includes(normalized)) {
        return normalized;
    }
    throw new Error(`Invalid --log-level "${val}". Allowed: ${allowedLevels.join(', ')}`);
}

@Command({ name: 'restart', description: 'Restart the Unraid API' })
export class RestartCommand extends CommandRunner {
    constructor(
@@ -13,11 +27,12 @@ export class RestartCommand extends CommandRunner {
        super();
    }

    async run(): Promise<void> {
    async run(_?: string[], options: LogLevelOptions = {}): Promise<void> {
        try {
            this.logger.info('Restarting the Unraid API...');
            const env = { LOG_LEVEL: options.logLevel };
            const { stderr, stdout } = await this.pm2.run(
                { tag: 'PM2 Restart', raw: true },
                { tag: 'PM2 Restart', raw: true, extendEnv: true, env },
                'restart',
                ECOSYSTEM_PATH,
                '--update-env'
@@ -40,4 +55,13 @@ export class RestartCommand extends CommandRunner {
            process.exit(1);
        }
    }

    @Option({
        flags: `--log-level <${levels.join('|')}>`,
        description: 'log level to use',
        defaultValue: LOG_LEVEL.toLowerCase(),
    })
    parseLogLevel(val: string): LogLevel {
        return parseLogLevelOption(val);
    }
}
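A minimal usage sketch of the shared parseLogLevelOption helper defined above; the import path is the one declared in this diff, and the level literals are examples that assume they appear in levels.

import { parseLogLevelOption } from '@app/unraid-api/cli/restart.command.js';

// Case-insensitive match against the allowed levels; unknown values throw.
parseLogLevelOption('INFO'); // -> 'info'
parseLogLevelOption('bogus'); // throws: Invalid --log-level "bogus". Allowed: <levels>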
|
||||
|
||||
@@ -1,14 +1,12 @@
import { Command, CommandRunner, Option } from 'nest-commander';

import type { LogLevel } from '@app/core/log.js';
import type { LogLevelOptions } from '@app/unraid-api/cli/restart.command.js';
import { levels } from '@app/core/log.js';
import { ECOSYSTEM_PATH } from '@app/environment.js';
import { ECOSYSTEM_PATH, LOG_LEVEL } from '@app/environment.js';
import { LogService } from '@app/unraid-api/cli/log.service.js';
import { PM2Service } from '@app/unraid-api/cli/pm2.service.js';

interface StartCommandOptions {
    'log-level'?: string;
}
import { parseLogLevelOption } from '@app/unraid-api/cli/restart.command.js';

@Command({ name: 'start', description: 'Start the Unraid API' })
export class StartCommand extends CommandRunner {
@@ -27,17 +25,12 @@ export class StartCommand extends CommandRunner {
        await this.pm2.run({ tag: 'PM2 Delete' }, 'delete', ECOSYSTEM_PATH);
    }

    async run(_: string[], options: StartCommandOptions): Promise<void> {
    async run(_: string[], options: LogLevelOptions): Promise<void> {
        this.logger.info('Starting the Unraid API');
        await this.cleanupPM2State();

        const env: Record<string, string> = {};
        if (options['log-level']) {
            env.LOG_LEVEL = options['log-level'];
        }

        const env = { LOG_LEVEL: options.logLevel };
        const { stderr, stdout } = await this.pm2.run(
            { tag: 'PM2 Start', env, raw: true },
            { tag: 'PM2 Start', raw: true, extendEnv: true, env },
            'start',
            ECOSYSTEM_PATH,
            '--update-env'
@@ -54,9 +47,9 @@ export class StartCommand extends CommandRunner {
    @Option({
        flags: `--log-level <${levels.join('|')}>`,
        description: 'log level to use',
        defaultValue: 'info',
        defaultValue: LOG_LEVEL.toLowerCase(),
    })
    parseLogLevel(val: string): LogLevel {
        return levels.includes(val as LogLevel) ? (val as LogLevel) : 'info';
        return parseLogLevelOption(val);
    }
}
|
||||
|
||||
@@ -1,14 +1,37 @@
import { Command, CommandRunner } from 'nest-commander';
import { Command, CommandRunner, Option } from 'nest-commander';

import { API_VERSION } from '@app/environment.js';
import { LogService } from '@app/unraid-api/cli/log.service.js';

@Command({ name: 'version' })
interface VersionOptions {
    json?: boolean;
}

@Command({ name: 'version', description: 'Display API version information' })
export class VersionCommand extends CommandRunner {
    constructor(private readonly logger: LogService) {
        super();
    }
    async run(): Promise<void> {
        this.logger.info(`Unraid API v${API_VERSION}`);

    @Option({
        flags: '-j, --json',
        description: 'Output version information as JSON',
    })
    parseJson(): boolean {
        return true;
    }

    async run(passedParam: string[], options?: VersionOptions): Promise<void> {
        if (options?.json) {
            const [baseVersion, buildInfo] = API_VERSION.split('+');
            const versionInfo = {
                version: baseVersion || API_VERSION,
                build: buildInfo || undefined,
                combined: API_VERSION,
            };
            console.log(JSON.stringify(versionInfo));
        } else {
            this.logger.info(`Unraid API v${API_VERSION}`);
        }
    }
}
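A small consumer-side sketch of the new machine-readable output; it assumes the CLI is invoked as unraid-api (the binary name is not shown in this diff) and that execa is available.

import { execa } from 'execa';

interface VersionJson {
    version: string;
    build?: string;
    combined: string;
}

// Runs `unraid-api version --json` and parses the single-line JSON payload.
async function readApiVersion(): Promise<VersionJson> {
    const { stdout } = await execa('unraid-api', ['version', '--json']);
    return JSON.parse(stdout) as VersionJson;
}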
|
||||
|
||||
@@ -126,6 +126,9 @@ export class ArrayDisk extends Node {

    @Field(() => ArrayDiskFsColor, { nullable: true })
    color?: ArrayDiskFsColor | null;

    @Field(() => Boolean, { nullable: true, description: 'Whether the disk is currently spinning' })
    isSpinning?: boolean | null;
}

@ObjectType({

@@ -3,7 +3,15 @@ import { Field, ObjectType, registerEnumType } from '@nestjs/graphql';
import { Node } from '@unraid/shared/graphql.model.js';
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';
import { Type } from 'class-transformer';
import { IsArray, IsEnum, IsNumber, IsOptional, IsString, ValidateNested } from 'class-validator';
import {
    IsArray,
    IsBoolean,
    IsEnum,
    IsNumber,
    IsOptional,
    IsString,
    ValidateNested,
} from 'class-validator';

export enum DiskFsType {
    XFS = 'XFS',
@@ -136,4 +144,8 @@ export class Disk extends Node {
    @ValidateNested({ each: true })
    @Type(() => DiskPartition)
    partitions!: DiskPartition[];

    @Field(() => Boolean, { description: 'Whether the disk is spinning or not' })
    @IsBoolean()
    isSpinning!: boolean;
}
|
||||
|
||||
@@ -66,6 +66,7 @@ describe('DisksResolver', () => {
                    smartStatus: DiskSmartStatus.OK,
                    temperature: -1,
                    partitions: [],
                    isSpinning: false,
                },
            ];
            mockDisksService.getDisks.mockResolvedValue(mockResult);
@@ -92,6 +93,7 @@ describe('DisksResolver', () => {
            const mockDisk: Disk = {
                id: 'SERIAL123',
                device: '/dev/sda',
                isSpinning: false,
                type: 'SSD',
                name: 'Samsung SSD 860 EVO 1TB',
                vendor: 'Samsung',

@@ -33,4 +33,9 @@ export class DisksResolver {
    public async temperature(@Parent() disk: Disk) {
        return this.disksService.getTemperature(disk.device);
    }

    @ResolveField(() => Boolean)
    public async isSpinning(@Parent() disk: Disk) {
        return disk.isSpinning;
    }
}
|
||||
|
||||
@@ -1,11 +1,17 @@
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import type { Systeminformation } from 'systeminformation';
|
||||
import { execa } from 'execa';
|
||||
import { blockDevices, diskLayout } from 'systeminformation';
|
||||
// Vitest imports
|
||||
import { beforeEach, describe, expect, it, Mock, MockedFunction, vi } from 'vitest';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import {
|
||||
ArrayDisk,
|
||||
ArrayDiskStatus,
|
||||
ArrayDiskType,
|
||||
} from '@app/unraid-api/graph/resolvers/array/array.model.js';
|
||||
import {
|
||||
Disk,
|
||||
DiskFsType,
|
||||
@@ -33,6 +39,86 @@ const mockBatchProcess = batchProcess as any;
|
||||
|
||||
describe('DisksService', () => {
|
||||
let service: DisksService;
|
||||
let configService: ConfigService;
|
||||
|
||||
// Mock ArrayDisk data from state
|
||||
const mockArrayDisks: ArrayDisk[] = [
|
||||
{
|
||||
id: 'S4ENNF0N123456',
|
||||
device: 'sda',
|
||||
name: 'cache',
|
||||
size: 512110190592,
|
||||
idx: 30,
|
||||
type: ArrayDiskType.CACHE,
|
||||
status: ArrayDiskStatus.DISK_OK,
|
||||
isSpinning: null, // NVMe/SSD doesn't spin
|
||||
rotational: false,
|
||||
exportable: false,
|
||||
numErrors: 0,
|
||||
numReads: 1000,
|
||||
numWrites: 2000,
|
||||
temp: 42,
|
||||
comment: 'NVMe Cache',
|
||||
format: 'GPT: 4KiB-aligned',
|
||||
fsType: 'btrfs',
|
||||
transport: 'nvme',
|
||||
warning: null,
|
||||
critical: null,
|
||||
fsFree: null,
|
||||
fsSize: null,
|
||||
fsUsed: null,
|
||||
},
|
||||
{
|
||||
id: 'WD-WCC7K7YL9876',
|
||||
device: 'sdb',
|
||||
name: 'disk1',
|
||||
size: 4000787030016,
|
||||
idx: 1,
|
||||
type: ArrayDiskType.DATA,
|
||||
status: ArrayDiskStatus.DISK_OK,
|
||||
isSpinning: true, // Currently spinning
|
||||
rotational: true,
|
||||
exportable: false,
|
||||
numErrors: 0,
|
||||
numReads: 5000,
|
||||
numWrites: 3000,
|
||||
temp: 35,
|
||||
comment: 'Data Disk 1',
|
||||
format: 'GPT: 4KiB-aligned',
|
||||
fsType: 'xfs',
|
||||
transport: 'sata',
|
||||
warning: null,
|
||||
critical: null,
|
||||
fsFree: 1000000000,
|
||||
fsSize: 4000000000,
|
||||
fsUsed: 3000000000,
|
||||
},
|
||||
{
|
||||
id: 'WD-SPUNDOWN123',
|
||||
device: 'sdd',
|
||||
name: 'disk2',
|
||||
size: 4000787030016,
|
||||
idx: 2,
|
||||
type: ArrayDiskType.DATA,
|
||||
status: ArrayDiskStatus.DISK_OK,
|
||||
isSpinning: false, // Spun down
|
||||
rotational: true,
|
||||
exportable: false,
|
||||
numErrors: 0,
|
||||
numReads: 3000,
|
||||
numWrites: 1000,
|
||||
temp: 30,
|
||||
comment: 'Data Disk 2 (spun down)',
|
||||
format: 'GPT: 4KiB-aligned',
|
||||
fsType: 'xfs',
|
||||
transport: 'sata',
|
||||
warning: null,
|
||||
critical: null,
|
||||
fsFree: 2000000000,
|
||||
fsSize: 4000000000,
|
||||
fsUsed: 2000000000,
|
||||
},
|
||||
];
|
||||
|
||||
const mockDiskLayoutData: Systeminformation.DiskLayoutData[] = [
|
||||
{
|
||||
@@ -92,6 +178,25 @@ describe('DisksService', () => {
|
||||
smartStatus: 'unknown', // Simulate unknown status
|
||||
temperature: null,
|
||||
},
|
||||
{
|
||||
device: '/dev/sdd',
|
||||
type: 'HD',
|
||||
name: 'WD Spun Down',
|
||||
vendor: 'Western Digital',
|
||||
size: 4000787030016,
|
||||
bytesPerSector: 512,
|
||||
totalCylinders: 486401,
|
||||
totalHeads: 255,
|
||||
totalSectors: 7814037168,
|
||||
totalTracks: 124032255,
|
||||
tracksPerCylinder: 255,
|
||||
sectorsPerTrack: 63,
|
||||
firmwareRevision: '82.00A82',
|
||||
serialNum: 'WD-SPUNDOWN123',
|
||||
interfaceType: 'SATA',
|
||||
smartStatus: 'Ok',
|
||||
temperature: null,
|
||||
},
|
||||
];
|
||||
|
||||
const mockBlockDeviceData: Systeminformation.BlockDevicesData[] = [
|
||||
@@ -174,17 +279,50 @@ describe('DisksService', () => {
|
||||
protocol: 'SATA', // Assume SATA even if interface type unknown for disk
|
||||
identifier: '/dev/sdc1',
|
||||
},
|
||||
// Partition for sdd
|
||||
{
|
||||
name: 'sdd1',
|
||||
type: 'part',
|
||||
fsType: 'xfs',
|
||||
mount: '/mnt/disk2',
|
||||
size: 4000787030016,
|
||||
physical: 'HDD',
|
||||
uuid: 'UUID-SDD1',
|
||||
label: 'Data2',
|
||||
model: 'WD Spun Down',
|
||||
serial: 'WD-SPUNDOWN123',
|
||||
removable: false,
|
||||
protocol: 'SATA',
|
||||
identifier: '/dev/sdd1',
|
||||
},
|
||||
];
|
||||
|
||||
beforeEach(async () => {
|
||||
// Reset mocks before each test using vi
|
||||
vi.clearAllMocks();
|
||||
|
||||
// Create mock ConfigService
|
||||
const mockConfigService = {
|
||||
get: vi.fn().mockImplementation((key: string, defaultValue?: any) => {
|
||||
if (key === 'store.emhttp.disks') {
|
||||
return mockArrayDisks;
|
||||
}
|
||||
return defaultValue;
|
||||
}),
|
||||
};
|
||||
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [DisksService],
|
||||
providers: [
|
||||
DisksService,
|
||||
{
|
||||
provide: ConfigService,
|
||||
useValue: mockConfigService,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<DisksService>(DisksService);
|
||||
configService = module.get<ConfigService>(ConfigService);
|
||||
|
||||
// Setup default mock implementations
|
||||
mockDiskLayout.mockResolvedValue(mockDiskLayoutData);
|
||||
@@ -207,46 +345,112 @@ describe('DisksService', () => {
|
||||
// --- Test getDisks ---
|
||||
|
||||
describe('getDisks', () => {
|
||||
it('should return disks without temperature', async () => {
|
||||
it('should return disks with spinning state from store', async () => {
|
||||
const disks = await service.getDisks();
|
||||
|
||||
expect(mockDiskLayout).toHaveBeenCalledTimes(1);
|
||||
expect(mockBlockDevices).toHaveBeenCalledTimes(1);
|
||||
expect(mockExeca).not.toHaveBeenCalled(); // Temperature should not be fetched
|
||||
expect(mockBatchProcess).toHaveBeenCalledTimes(1); // Still uses batchProcess for parsing
|
||||
expect(configService.get).toHaveBeenCalledWith('store.emhttp.disks', []);
|
||||
expect(mockBatchProcess).toHaveBeenCalledTimes(1);
|
||||
|
||||
expect(disks).toHaveLength(mockDiskLayoutData.length);
|
||||
expect(disks[0]).toMatchObject({
|
||||
id: 'S4ENNF0N123456',
|
||||
device: '/dev/sda',
|
||||
type: 'HD',
|
||||
name: 'SAMSUNG MZVLB512HBJQ-000L7',
|
||||
vendor: 'Samsung',
|
||||
size: 512110190592,
|
||||
interfaceType: DiskInterfaceType.PCIE,
|
||||
smartStatus: DiskSmartStatus.OK,
|
||||
temperature: null, // Temperature is now null by default
|
||||
partitions: [
|
||||
{ name: 'sda1', fsType: DiskFsType.VFAT, size: 536870912 },
|
||||
{ name: 'sda2', fsType: DiskFsType.EXT4, size: 511560000000 },
|
||||
],
|
||||
|
||||
// Check NVMe disk with null spinning state
|
||||
const nvmeDisk = disks.find((d) => d.id === 'S4ENNF0N123456');
|
||||
expect(nvmeDisk).toBeDefined();
|
||||
expect(nvmeDisk?.isSpinning).toBe(false); // null from state defaults to false
|
||||
expect(nvmeDisk?.interfaceType).toBe(DiskInterfaceType.PCIE);
|
||||
expect(nvmeDisk?.smartStatus).toBe(DiskSmartStatus.OK);
|
||||
expect(nvmeDisk?.partitions).toHaveLength(2);
|
||||
|
||||
// Check spinning disk
|
||||
const spinningDisk = disks.find((d) => d.id === 'WD-WCC7K7YL9876');
|
||||
expect(spinningDisk).toBeDefined();
|
||||
expect(spinningDisk?.isSpinning).toBe(true); // From state
|
||||
expect(spinningDisk?.interfaceType).toBe(DiskInterfaceType.SATA);
|
||||
|
||||
// Check spun down disk
|
||||
const spunDownDisk = disks.find((d) => d.id === 'WD-SPUNDOWN123');
|
||||
expect(spunDownDisk).toBeDefined();
|
||||
expect(spunDownDisk?.isSpinning).toBe(false); // From state
|
||||
|
||||
// Check disk not in state (defaults to not spinning)
|
||||
const unknownDisk = disks.find((d) => d.id === 'OTHER-SERIAL-123');
|
||||
expect(unknownDisk).toBeDefined();
|
||||
expect(unknownDisk?.isSpinning).toBe(false); // Not in state, defaults to false
|
||||
expect(unknownDisk?.interfaceType).toBe(DiskInterfaceType.UNKNOWN);
|
||||
expect(unknownDisk?.smartStatus).toBe(DiskSmartStatus.UNKNOWN);
|
||||
});
|
||||
|
||||
it('should handle empty state gracefully', async () => {
|
||||
vi.mocked(configService.get).mockImplementation((key: string, defaultValue?: any) => {
|
||||
if (key === 'store.emhttp.disks') {
|
||||
return [];
|
||||
}
|
||||
return defaultValue;
|
||||
});
|
||||
expect(disks[1]).toMatchObject({
|
||||
id: 'WD-WCC7K7YL9876',
|
||||
device: '/dev/sdb',
|
||||
interfaceType: DiskInterfaceType.SATA,
|
||||
smartStatus: DiskSmartStatus.OK,
|
||||
temperature: null,
|
||||
partitions: [{ name: 'sdb1', fsType: DiskFsType.XFS, size: 4000787030016 }],
|
||||
|
||||
const disks = await service.getDisks();
|
||||
|
||||
// All disks should default to not spinning when state is empty
|
||||
expect(disks).toHaveLength(mockDiskLayoutData.length);
|
||||
disks.forEach((disk) => {
|
||||
expect(disk.isSpinning).toBe(false);
|
||||
});
|
||||
expect(disks[2]).toMatchObject({
|
||||
id: 'OTHER-SERIAL-123',
|
||||
device: '/dev/sdc',
|
||||
interfaceType: DiskInterfaceType.UNKNOWN,
|
||||
smartStatus: DiskSmartStatus.UNKNOWN,
|
||||
temperature: null,
|
||||
partitions: [{ name: 'sdc1', fsType: DiskFsType.NTFS, size: 1000204886016 }],
|
||||
});
|
||||
|
||||
it('should handle trimmed serial numbers correctly', async () => {
|
||||
// Add disk with spaces in ID
|
||||
const disksWithSpaces = [...mockArrayDisks];
|
||||
disksWithSpaces[0] = {
|
||||
...disksWithSpaces[0],
|
||||
id: ' S4ENNF0N123456 ', // spaces around ID
|
||||
};
|
||||
|
||||
vi.mocked(configService.get).mockImplementation((key: string, defaultValue?: any) => {
|
||||
if (key === 'store.emhttp.disks') {
|
||||
return disksWithSpaces;
|
||||
}
|
||||
return defaultValue;
|
||||
});
|
||||
|
||||
const disks = await service.getDisks();
|
||||
const disk = disks.find((d) => d.id === 'S4ENNF0N123456');
|
||||
|
||||
expect(disk).toBeDefined();
|
||||
expect(disk?.isSpinning).toBe(false); // null becomes false
|
||||
});
|
||||
|
||||
it('should correctly map partitions to disks', async () => {
|
||||
const disks = await service.getDisks();
|
||||
|
||||
const disk1 = disks.find((d) => d.id === 'S4ENNF0N123456');
|
||||
expect(disk1?.partitions).toHaveLength(2);
|
||||
expect(disk1?.partitions[0]).toEqual({
|
||||
name: 'sda1',
|
||||
fsType: DiskFsType.VFAT,
|
||||
size: 536870912,
|
||||
});
|
||||
expect(disk1?.partitions[1]).toEqual({
|
||||
name: 'sda2',
|
||||
fsType: DiskFsType.EXT4,
|
||||
size: 511560000000,
|
||||
});
|
||||
|
||||
const disk2 = disks.find((d) => d.id === 'WD-WCC7K7YL9876');
|
||||
expect(disk2?.partitions).toHaveLength(1);
|
||||
expect(disk2?.partitions[0]).toEqual({
|
||||
name: 'sdb1',
|
||||
fsType: DiskFsType.XFS,
|
||||
size: 4000787030016,
|
||||
});
|
||||
});
|
||||
|
||||
it('should use ConfigService to get state data', async () => {
|
||||
await service.getDisks();
|
||||
|
||||
// Verify we're accessing the state through ConfigService
|
||||
expect(configService.get).toHaveBeenCalledWith('store.emhttp.disks', []);
|
||||
});
|
||||
|
||||
it('should handle empty disk layout or block devices', async () => {
|
||||
@@ -267,6 +471,31 @@ describe('DisksService', () => {
|
||||
});
|
||||
});
|
||||
|
||||
// --- Test getDisk ---
|
||||
describe('getDisk', () => {
|
||||
it('should return a specific disk by id', async () => {
|
||||
const disk = await service.getDisk('S4ENNF0N123456');
|
||||
|
||||
expect(disk).toBeDefined();
|
||||
expect(disk.id).toBe('S4ENNF0N123456');
|
||||
expect(disk.isSpinning).toBe(false); // null becomes false
|
||||
});
|
||||
|
||||
it('should return spinning disk correctly', async () => {
|
||||
const disk = await service.getDisk('WD-WCC7K7YL9876');
|
||||
|
||||
expect(disk).toBeDefined();
|
||||
expect(disk.id).toBe('WD-WCC7K7YL9876');
|
||||
expect(disk.isSpinning).toBe(true);
|
||||
});
|
||||
|
||||
it('should throw NotFoundException for non-existent disk', async () => {
|
||||
await expect(service.getDisk('NONEXISTENT')).rejects.toThrow(
|
||||
'Disk with id NONEXISTENT not found'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
// --- Test getTemperature ---
|
||||
describe('getTemperature', () => {
|
||||
it('should return temperature for a disk', async () => {
|
||||
|
||||
@@ -1,9 +1,11 @@
import { Injectable, NotFoundException } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';

import type { Systeminformation } from 'systeminformation';
import { execa } from 'execa';
import { blockDevices, diskLayout } from 'systeminformation';

import { ArrayDisk } from '@app/unraid-api/graph/resolvers/array/array.model.js';
import {
    Disk,
    DiskFsType,
@@ -14,6 +16,7 @@ import { batchProcess } from '@app/utils.js';

@Injectable()
export class DisksService {
    constructor(private readonly configService: ConfigService) {}
    public async getTemperature(device: string): Promise<number | null> {
        try {
            const { stdout } = await execa('smartctl', ['-A', device]);
@@ -51,7 +54,8 @@ export class DisksService {

    private async parseDisk(
        disk: Systeminformation.DiskLayoutData,
        partitionsToParse: Systeminformation.BlockDevicesData[]
        partitionsToParse: Systeminformation.BlockDevicesData[],
        arrayDisks: ArrayDisk[]
    ): Promise<Omit<Disk, 'temperature'>> {
        const partitions = partitionsToParse
            // Only get partitions from this disk
@@ -115,6 +119,8 @@ export class DisksService {
            mappedInterfaceType = DiskInterfaceType.UNKNOWN;
        }

        const arrayDisk = arrayDisks.find((d) => d.id.trim() === disk.serialNum.trim());

        return {
            ...disk,
            id: disk.serialNum, // Ensure id is set
@@ -123,6 +129,7 @@ export class DisksService {
                DiskSmartStatus.UNKNOWN,
            interfaceType: mappedInterfaceType,
            partitions,
            isSpinning: arrayDisk?.isSpinning ?? false,
        };
    }

@@ -133,9 +140,9 @@ export class DisksService {
        const partitions = await blockDevices().then((devices) =>
            devices.filter((device) => device.type === 'part')
        );

        const arrayDisks = this.configService.get<ArrayDisk[]>('store.emhttp.disks', []);
        const { data } = await batchProcess(await diskLayout(), async (disk) =>
            this.parseDisk(disk, partitions)
            this.parseDisk(disk, partitions, arrayDisks)
        );
        return data;
    }
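The spin-state lookup added to parseDisk above amounts to the following sketch: match the systeminformation serial number against the emhttp state entry by trimmed id and treat a missing or null value as not spinning. The helper name is illustrative; parseDisk() performs this inline.

import type { ArrayDisk } from '@app/unraid-api/graph/resolvers/array/array.model.js';

// Illustrative helper mirroring the lookup in parseDisk() above.
function resolveIsSpinning(serialNum: string, arrayDisks: ArrayDisk[]): boolean {
    const arrayDisk = arrayDisks.find((d) => d.id.trim() === serialNum.trim());
    // isSpinning can be true, false, or null in state; null (e.g. NVMe) maps to false.
    return arrayDisk?.isSpinning ?? false;
}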
|
||||
|
||||
@@ -27,6 +27,16 @@ export class CpuLoad {
        description: 'The percentage of time the CPU spent servicing hardware interrupts.',
    })
    percentIrq!: number;

    @Field(() => Float, {
        description: 'The percentage of time the CPU spent running virtual machines (guest).',
    })
    percentGuest!: number;

    @Field(() => Float, {
        description: 'The percentage of CPU time stolen by the hypervisor.',
    })
    percentSteal!: number;
}

@ObjectType({ implements: () => Node })
|
||||
|
||||
246 api/src/unraid-api/graph/resolvers/info/cpu/cpu.service.spec.ts Normal file
@@ -0,0 +1,246 @@
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service.js';
|
||||
|
||||
vi.mock('systeminformation', () => ({
|
||||
cpu: vi.fn().mockResolvedValue({
|
||||
manufacturer: 'Intel',
|
||||
brand: 'Core i7-9700K',
|
||||
vendor: 'Intel',
|
||||
family: '6',
|
||||
model: '158',
|
||||
stepping: '12',
|
||||
revision: '',
|
||||
voltage: '1.2V',
|
||||
speed: 3.6,
|
||||
speedMin: 800,
|
||||
speedMax: 4900,
|
||||
cores: 16,
|
||||
physicalCores: 8,
|
||||
processors: 1,
|
||||
socket: 'LGA1151',
|
||||
cache: {
|
||||
l1d: 32768,
|
||||
l1i: 32768,
|
||||
l2: 262144,
|
||||
l3: 12582912,
|
||||
},
|
||||
}),
|
||||
cpuFlags: vi.fn().mockResolvedValue('fpu vme de pse tsc msr pae mce cx8'),
|
||||
currentLoad: vi.fn().mockResolvedValue({
|
||||
avgLoad: 2.5,
|
||||
currentLoad: 25.5,
|
||||
currentLoadUser: 15.0,
|
||||
currentLoadSystem: 8.0,
|
||||
currentLoadNice: 0.5,
|
||||
currentLoadIdle: 74.5,
|
||||
currentLoadIrq: 1.0,
|
||||
currentLoadSteal: 0.2,
|
||||
currentLoadGuest: 0.3,
|
||||
rawCurrentLoad: 25500,
|
||||
rawCurrentLoadUser: 15000,
|
||||
rawCurrentLoadSystem: 8000,
|
||||
rawCurrentLoadNice: 500,
|
||||
rawCurrentLoadIdle: 74500,
|
||||
rawCurrentLoadIrq: 1000,
|
||||
rawCurrentLoadSteal: 200,
|
||||
rawCurrentLoadGuest: 300,
|
||||
cpus: [
|
||||
{
|
||||
load: 30.0,
|
||||
loadUser: 20.0,
|
||||
loadSystem: 10.0,
|
||||
loadNice: 0,
|
||||
loadIdle: 70.0,
|
||||
loadIrq: 0,
|
||||
loadSteal: 0,
|
||||
loadGuest: 0,
|
||||
rawLoad: 30000,
|
||||
rawLoadUser: 20000,
|
||||
rawLoadSystem: 10000,
|
||||
rawLoadNice: 0,
|
||||
rawLoadIdle: 70000,
|
||||
rawLoadIrq: 0,
|
||||
rawLoadSteal: 0,
|
||||
rawLoadGuest: 0,
|
||||
},
|
||||
{
|
||||
load: 21.0,
|
||||
loadUser: 15.0,
|
||||
loadSystem: 6.0,
|
||||
loadNice: 0,
|
||||
loadIdle: 79.0,
|
||||
loadIrq: 0,
|
||||
loadSteal: 0,
|
||||
loadGuest: 0,
|
||||
rawLoad: 21000,
|
||||
rawLoadUser: 15000,
|
||||
rawLoadSystem: 6000,
|
||||
rawLoadNice: 0,
|
||||
rawLoadIdle: 79000,
|
||||
rawLoadIrq: 0,
|
||||
rawLoadSteal: 0,
|
||||
rawLoadGuest: 0,
|
||||
},
|
||||
],
|
||||
}),
|
||||
}));
|
||||
|
||||
describe('CpuService', () => {
|
||||
let service: CpuService;
|
||||
|
||||
beforeEach(() => {
|
||||
service = new CpuService();
|
||||
});
|
||||
|
||||
describe('generateCpu', () => {
|
||||
it('should return CPU information with correct structure', async () => {
|
||||
const result = await service.generateCpu();
|
||||
|
||||
expect(result).toEqual({
|
||||
id: 'info/cpu',
|
||||
manufacturer: 'Intel',
|
||||
brand: 'Core i7-9700K',
|
||||
vendor: 'Intel',
|
||||
family: '6',
|
||||
model: '158',
|
||||
stepping: 12,
|
||||
revision: '',
|
||||
voltage: '1.2V',
|
||||
speed: 3.6,
|
||||
speedmin: 800,
|
||||
speedmax: 4900,
|
||||
cores: 8,
|
||||
threads: 16,
|
||||
processors: 1,
|
||||
socket: 'LGA1151',
|
||||
cache: {
|
||||
l1d: 32768,
|
||||
l1i: 32768,
|
||||
l2: 262144,
|
||||
l3: 12582912,
|
||||
},
|
||||
flags: ['fpu', 'vme', 'de', 'pse', 'tsc', 'msr', 'pae', 'mce', 'cx8'],
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle missing speed values', async () => {
|
||||
const { cpu } = await import('systeminformation');
|
||||
vi.mocked(cpu).mockResolvedValueOnce({
|
||||
manufacturer: 'Intel',
|
||||
brand: 'Core i7-9700K',
|
||||
vendor: 'Intel',
|
||||
family: '6',
|
||||
model: '158',
|
||||
stepping: '12',
|
||||
revision: '',
|
||||
voltage: '1.2V',
|
||||
speed: 3.6,
|
||||
cores: 16,
|
||||
physicalCores: 8,
|
||||
processors: 1,
|
||||
socket: 'LGA1151',
|
||||
cache: { l1d: 32768, l1i: 32768, l2: 262144, l3: 12582912 },
|
||||
} as any);
|
||||
|
||||
const result = await service.generateCpu();
|
||||
|
||||
expect(result.speedmin).toBe(-1);
|
||||
expect(result.speedmax).toBe(-1);
|
||||
});
|
||||
|
||||
it('should handle cpuFlags error gracefully', async () => {
|
||||
const { cpuFlags } = await import('systeminformation');
|
||||
vi.mocked(cpuFlags).mockRejectedValueOnce(new Error('flags error'));
|
||||
|
||||
const result = await service.generateCpu();
|
||||
|
||||
expect(result.flags).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('generateCpuLoad', () => {
|
||||
it('should return CPU utilization with all load metrics', async () => {
|
||||
const result = await service.generateCpuLoad();
|
||||
|
||||
expect(result).toEqual({
|
||||
id: 'info/cpu-load',
|
||||
percentTotal: 25.5,
|
||||
cpus: [
|
||||
{
|
||||
percentTotal: 30.0,
|
||||
percentUser: 20.0,
|
||||
percentSystem: 10.0,
|
||||
percentNice: 0,
|
||||
percentIdle: 70.0,
|
||||
percentIrq: 0,
|
||||
percentGuest: 0,
|
||||
percentSteal: 0,
|
||||
},
|
||||
{
|
||||
percentTotal: 21.0,
|
||||
percentUser: 15.0,
|
||||
percentSystem: 6.0,
|
||||
percentNice: 0,
|
||||
percentIdle: 79.0,
|
||||
percentIrq: 0,
|
||||
percentGuest: 0,
|
||||
percentSteal: 0,
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('should include guest and steal metrics when present', async () => {
|
||||
const { currentLoad } = await import('systeminformation');
|
||||
vi.mocked(currentLoad).mockResolvedValueOnce({
|
||||
avgLoad: 2.5,
|
||||
currentLoad: 25.5,
|
||||
currentLoadUser: 15.0,
|
||||
currentLoadSystem: 8.0,
|
||||
currentLoadNice: 0.5,
|
||||
currentLoadIdle: 74.5,
|
||||
currentLoadIrq: 1.0,
|
||||
currentLoadSteal: 0.2,
|
||||
currentLoadGuest: 0.3,
|
||||
rawCurrentLoad: 25500,
|
||||
rawCurrentLoadUser: 15000,
|
||||
rawCurrentLoadSystem: 8000,
|
||||
rawCurrentLoadNice: 500,
|
||||
rawCurrentLoadIdle: 74500,
|
||||
rawCurrentLoadIrq: 1000,
|
||||
rawCurrentLoadSteal: 200,
|
||||
rawCurrentLoadGuest: 300,
|
||||
cpus: [
|
||||
{
|
||||
load: 30.0,
|
||||
loadUser: 20.0,
|
||||
loadSystem: 10.0,
|
||||
loadNice: 0,
|
||||
loadIdle: 70.0,
|
||||
loadIrq: 0,
|
||||
loadGuest: 2.5,
|
||||
loadSteal: 1.2,
|
||||
rawLoad: 30000,
|
||||
rawLoadUser: 20000,
|
||||
rawLoadSystem: 10000,
|
||||
rawLoadNice: 0,
|
||||
rawLoadIdle: 70000,
|
||||
rawLoadIrq: 0,
|
||||
rawLoadGuest: 2500,
|
||||
rawLoadSteal: 1200,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const result = await service.generateCpuLoad();
|
||||
|
||||
expect(result.cpus[0]).toEqual(
|
||||
expect.objectContaining({
|
||||
percentGuest: 2.5,
|
||||
percentSteal: 1.2,
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -37,6 +37,8 @@ export class CpuService {
                percentNice: cpu.loadNice,
                percentIdle: cpu.loadIdle,
                percentIrq: cpu.loadIrq,
                percentGuest: cpu.loadGuest || 0,
                percentSteal: cpu.loadSteal || 0,
            })),
        };
    }
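The per-core mapping shown above renames systeminformation's CurrentLoadCpuData fields onto the GraphQL CpuLoad fields, defaulting guest and steal to 0 when the platform does not report them; a standalone sketch of that mapping, using the field names exercised by the mocks in this diff:

import type { Systeminformation } from 'systeminformation';

// Illustrative per-core mapping; generateCpuLoad() above builds these objects.
function toCpuLoad(cpu: Systeminformation.CurrentLoadCpuData) {
    return {
        percentTotal: cpu.load,
        percentUser: cpu.loadUser,
        percentSystem: cpu.loadSystem,
        percentNice: cpu.loadNice,
        percentIdle: cpu.loadIdle,
        percentIrq: cpu.loadIrq,
        percentGuest: cpu.loadGuest || 0,
        percentSteal: cpu.loadSteal || 0,
    };
}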
|
||||
|
||||
@@ -0,0 +1,213 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import {
|
||||
LogWatcherManager,
|
||||
WatcherState,
|
||||
} from '@app/unraid-api/graph/resolvers/logs/log-watcher-manager.service.js';
|
||||
|
||||
describe('LogWatcherManager', () => {
|
||||
let manager: LogWatcherManager;
|
||||
let mockWatcher: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [LogWatcherManager],
|
||||
}).compile();
|
||||
|
||||
manager = module.get<LogWatcherManager>(LogWatcherManager);
|
||||
|
||||
mockWatcher = {
|
||||
close: vi.fn(),
|
||||
on: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
describe('state management', () => {
|
||||
it('should set watcher as initializing', () => {
|
||||
manager.setInitializing('test-key');
|
||||
const entry = manager.getEntry('test-key');
|
||||
expect(entry).toBeDefined();
|
||||
expect(entry?.state).toBe(WatcherState.INITIALIZING);
|
||||
});
|
||||
|
||||
it('should set watcher as active with position', () => {
|
||||
manager.setActive('test-key', mockWatcher as any, 1000);
|
||||
const entry = manager.getEntry('test-key');
|
||||
expect(entry).toBeDefined();
|
||||
expect(entry?.state).toBe(WatcherState.ACTIVE);
|
||||
if (manager.isActive(entry)) {
|
||||
expect(entry.watcher).toBe(mockWatcher);
|
||||
expect(entry.position).toBe(1000);
|
||||
}
|
||||
});
|
||||
|
||||
it('should set watcher as stopping', () => {
|
||||
manager.setStopping('test-key');
|
||||
const entry = manager.getEntry('test-key');
|
||||
expect(entry).toBeDefined();
|
||||
expect(entry?.state).toBe(WatcherState.STOPPING);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isWatchingOrInitializing', () => {
|
||||
it('should return true for initializing watcher', () => {
|
||||
manager.setInitializing('test-key');
|
||||
expect(manager.isWatchingOrInitializing('test-key')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true for active watcher', () => {
|
||||
manager.setActive('test-key', mockWatcher as any, 0);
|
||||
expect(manager.isWatchingOrInitializing('test-key')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for stopping watcher', () => {
|
||||
manager.setStopping('test-key');
|
||||
expect(manager.isWatchingOrInitializing('test-key')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for non-existent watcher', () => {
|
||||
expect(manager.isWatchingOrInitializing('test-key')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('handlePostInitialization', () => {
|
||||
it('should activate watcher when not stopped', () => {
|
||||
manager.setInitializing('test-key');
|
||||
const result = manager.handlePostInitialization('test-key', mockWatcher as any, 500);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockWatcher.close).not.toHaveBeenCalled();
|
||||
|
||||
const entry = manager.getEntry('test-key');
|
||||
expect(entry?.state).toBe(WatcherState.ACTIVE);
|
||||
if (manager.isActive(entry)) {
|
||||
expect(entry.position).toBe(500);
|
||||
}
|
||||
});
|
||||
|
||||
it('should cleanup watcher when marked as stopping', () => {
|
||||
manager.setStopping('test-key');
|
||||
const result = manager.handlePostInitialization('test-key', mockWatcher as any, 500);
|
||||
|
||||
expect(result).toBe(false);
|
||||
expect(mockWatcher.close).toHaveBeenCalled();
|
||||
expect(manager.getEntry('test-key')).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should cleanup watcher when entry is missing', () => {
|
||||
const result = manager.handlePostInitialization('test-key', mockWatcher as any, 500);
|
||||
|
||||
expect(result).toBe(false);
|
||||
expect(mockWatcher.close).toHaveBeenCalled();
|
||||
expect(manager.getEntry('test-key')).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('stopWatcher', () => {
|
||||
it('should mark initializing watcher as stopping', () => {
|
||||
manager.setInitializing('test-key');
|
||||
manager.stopWatcher('test-key');
|
||||
|
||||
const entry = manager.getEntry('test-key');
|
||||
expect(entry?.state).toBe(WatcherState.STOPPING);
|
||||
});
|
||||
|
||||
it('should close and remove active watcher', () => {
|
||||
manager.setActive('test-key', mockWatcher as any, 0);
|
||||
manager.stopWatcher('test-key');
|
||||
|
||||
expect(mockWatcher.close).toHaveBeenCalled();
|
||||
expect(manager.getEntry('test-key')).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should do nothing for non-existent watcher', () => {
|
||||
manager.stopWatcher('test-key');
|
||||
expect(mockWatcher.close).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('position management', () => {
|
||||
it('should update position for active watcher', () => {
|
||||
manager.setActive('test-key', mockWatcher as any, 100);
|
||||
manager.updatePosition('test-key', 200);
|
||||
|
||||
const position = manager.getPosition('test-key');
|
||||
expect(position).toBe(200);
|
||||
});
|
||||
|
||||
it('should not update position for non-active watcher', () => {
|
||||
manager.setInitializing('test-key');
|
||||
manager.updatePosition('test-key', 200);
|
||||
|
||||
const position = manager.getPosition('test-key');
|
||||
expect(position).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should get position for active watcher', () => {
|
||||
manager.setActive('test-key', mockWatcher as any, 300);
|
||||
expect(manager.getPosition('test-key')).toBe(300);
|
||||
});
|
||||
|
||||
it('should return undefined for non-active watcher', () => {
|
||||
manager.setStopping('test-key');
|
||||
expect(manager.getPosition('test-key')).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('stopAllWatchers', () => {
|
||||
it('should close all active watchers and clear map', () => {
|
||||
const mockWatcher1 = { close: vi.fn() };
|
||||
const mockWatcher2 = { close: vi.fn() };
|
||||
const mockWatcher3 = { close: vi.fn() };
|
||||
|
||||
manager.setActive('key1', mockWatcher1 as any, 0);
|
||||
manager.setInitializing('key2');
|
||||
manager.setActive('key3', mockWatcher2 as any, 0);
|
||||
manager.setStopping('key4');
|
||||
manager.setActive('key5', mockWatcher3 as any, 0);
|
||||
|
||||
manager.stopAllWatchers();
|
||||
|
||||
expect(mockWatcher1.close).toHaveBeenCalled();
|
||||
expect(mockWatcher2.close).toHaveBeenCalled();
|
||||
expect(mockWatcher3.close).toHaveBeenCalled();
|
||||
|
||||
expect(manager.getEntry('key1')).toBeUndefined();
|
||||
expect(manager.getEntry('key2')).toBeUndefined();
|
||||
expect(manager.getEntry('key3')).toBeUndefined();
|
||||
expect(manager.getEntry('key4')).toBeUndefined();
|
||||
expect(manager.getEntry('key5')).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('in-flight processing', () => {
|
||||
it('should prevent concurrent processing', () => {
|
||||
manager.setActive('test-key', mockWatcher as any, 0);
|
||||
|
||||
// First call should succeed
|
||||
expect(manager.startProcessing('test-key')).toBe(true);
|
||||
|
||||
// Second call should fail (already in flight)
|
||||
expect(manager.startProcessing('test-key')).toBe(false);
|
||||
|
||||
// After finishing, should be able to start again
|
||||
manager.finishProcessing('test-key');
|
||||
expect(manager.startProcessing('test-key')).toBe(true);
|
||||
});
|
||||
|
||||
it('should not start processing for non-active watcher', () => {
|
||||
manager.setInitializing('test-key');
|
||||
expect(manager.startProcessing('test-key')).toBe(false);
|
||||
|
||||
manager.setStopping('test-key');
|
||||
expect(manager.startProcessing('test-key')).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle finish processing for non-existent watcher', () => {
|
||||
// Should not throw
|
||||
expect(() => manager.finishProcessing('non-existent')).not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,183 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
|
||||
import * as chokidar from 'chokidar';
|
||||
|
||||
export enum WatcherState {
|
||||
INITIALIZING = 'initializing',
|
||||
ACTIVE = 'active',
|
||||
STOPPING = 'stopping',
|
||||
}
|
||||
|
||||
export type WatcherEntry =
|
||||
| { state: WatcherState.INITIALIZING }
|
||||
| { state: WatcherState.ACTIVE; watcher: chokidar.FSWatcher; position: number; inFlight: boolean }
|
||||
| { state: WatcherState.STOPPING };
|
||||
|
||||
/**
|
||||
* Service responsible for managing log file watchers and their lifecycle.
|
||||
* Handles race conditions during watcher initialization and cleanup.
|
||||
*/
|
||||
@Injectable()
|
||||
export class LogWatcherManager {
|
||||
private readonly logger = new Logger(LogWatcherManager.name);
|
||||
private readonly watchers = new Map<string, WatcherEntry>();
|
||||
|
||||
/**
|
||||
* Set a watcher as initializing
|
||||
*/
|
||||
setInitializing(key: string): void {
|
||||
this.watchers.set(key, { state: WatcherState.INITIALIZING });
|
||||
}
|
||||
|
||||
/**
|
||||
* Set a watcher as active with its FSWatcher and position
|
||||
*/
|
||||
setActive(key: string, watcher: chokidar.FSWatcher, position: number): void {
|
||||
this.watchers.set(key, { state: WatcherState.ACTIVE, watcher, position, inFlight: false });
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark a watcher as stopping (used during initialization race conditions)
|
||||
*/
|
||||
setStopping(key: string): void {
|
||||
this.watchers.set(key, { state: WatcherState.STOPPING });
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a watcher entry by key
|
||||
*/
|
||||
getEntry(key: string): WatcherEntry | undefined {
|
||||
return this.watchers.get(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a watcher entry
|
||||
*/
|
||||
removeEntry(key: string): void {
|
||||
this.watchers.delete(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a watcher is active and return typed entry
|
||||
*/
|
||||
isActive(entry: WatcherEntry | undefined): entry is {
|
||||
state: WatcherState.ACTIVE;
|
||||
watcher: chokidar.FSWatcher;
|
||||
position: number;
|
||||
inFlight: boolean;
|
||||
} {
|
||||
return entry?.state === WatcherState.ACTIVE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a watcher exists and is either initializing or active
|
||||
*/
|
||||
isWatchingOrInitializing(key: string): boolean {
|
||||
const entry = this.getEntry(key);
|
||||
return (
|
||||
entry !== undefined &&
|
||||
(entry.state === WatcherState.ACTIVE || entry.state === WatcherState.INITIALIZING)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle cleanup after initialization completes.
|
||||
* Returns true if the watcher should continue, false if it should be cleaned up.
|
||||
*/
|
||||
handlePostInitialization(key: string, watcher: chokidar.FSWatcher, position: number): boolean {
|
||||
const currentEntry = this.getEntry(key);
|
||||
|
||||
if (!currentEntry || currentEntry.state === WatcherState.STOPPING) {
|
||||
// We were stopped during initialization, clean up immediately
|
||||
this.logger.debug(`Watcher for ${key} was stopped during initialization, cleaning up`);
|
||||
watcher.close();
|
||||
this.removeEntry(key);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Store the active watcher and position
|
||||
this.setActive(key, watcher, position);
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop a watcher, handling all possible states
|
||||
*/
|
||||
stopWatcher(key: string): void {
|
||||
const entry = this.getEntry(key);
|
||||
|
||||
if (!entry) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (entry.state === WatcherState.INITIALIZING) {
|
||||
// Mark as stopping so the initialization will clean up
|
||||
this.setStopping(key);
|
||||
this.logger.debug(`Marked watcher as stopping during initialization: ${key}`);
|
||||
} else if (entry.state === WatcherState.ACTIVE) {
|
||||
// Close the active watcher
|
||||
entry.watcher.close();
|
||||
this.removeEntry(key);
|
||||
this.logger.debug(`Stopped active watcher: ${key}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the position for an active watcher
|
||||
*/
|
||||
updatePosition(key: string, newPosition: number): void {
|
||||
const entry = this.getEntry(key);
|
||||
if (this.isActive(entry)) {
|
||||
entry.position = newPosition;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start processing a change event (set inFlight to true)
|
||||
* Returns true if processing can proceed, false if already in flight
|
||||
*/
|
||||
startProcessing(key: string): boolean {
|
||||
const entry = this.getEntry(key);
|
||||
if (this.isActive(entry)) {
|
||||
if (entry.inFlight) {
|
||||
return false; // Already processing
|
||||
}
|
||||
entry.inFlight = true;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Finish processing a change event (set inFlight to false)
|
||||
*/
|
||||
finishProcessing(key: string): void {
|
||||
const entry = this.getEntry(key);
|
||||
if (this.isActive(entry)) {
|
||||
entry.inFlight = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the position for an active watcher
|
||||
*/
|
||||
getPosition(key: string): number | undefined {
|
||||
const entry = this.getEntry(key);
|
||||
if (this.isActive(entry)) {
|
||||
return entry.position;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up all watchers (useful for module cleanup)
|
||||
*/
|
||||
stopAllWatchers(): void {
|
||||
for (const entry of this.watchers.values()) {
|
||||
if (this.isActive(entry)) {
|
||||
entry.watcher.close();
|
||||
}
|
||||
}
|
||||
this.watchers.clear();
|
||||
}
|
||||
}
|
||||
@@ -1,10 +1,13 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { LogWatcherManager } from '@app/unraid-api/graph/resolvers/logs/log-watcher-manager.service.js';
|
||||
import { LogsResolver } from '@app/unraid-api/graph/resolvers/logs/logs.resolver.js';
|
||||
import { LogsService } from '@app/unraid-api/graph/resolvers/logs/logs.service.js';
|
||||
import { ServicesModule } from '@app/unraid-api/graph/services/services.module.js';
|
||||
|
||||
@Module({
|
||||
providers: [LogsResolver, LogsService],
|
||||
exports: [LogsService],
|
||||
imports: [ServicesModule],
|
||||
providers: [LogsResolver, LogsService, LogWatcherManager],
|
||||
exports: [LogsService, LogWatcherManager],
|
||||
})
|
||||
export class LogsModule {}
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import { beforeEach, describe, expect, it } from 'vitest';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { LogsResolver } from '@app/unraid-api/graph/resolvers/logs/logs.resolver.js';
|
||||
import { LogsService } from '@app/unraid-api/graph/resolvers/logs/logs.service.js';
|
||||
import { SubscriptionHelperService } from '@app/unraid-api/graph/services/subscription-helper.service.js';
|
||||
|
||||
describe('LogsResolver', () => {
|
||||
let resolver: LogsResolver;
|
||||
@@ -18,6 +19,13 @@ describe('LogsResolver', () => {
|
||||
// Add mock implementations for service methods used by resolver
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: SubscriptionHelperService,
|
||||
useValue: {
|
||||
// Add mock implementations for subscription helper methods
|
||||
createTrackedSubscription: vi.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
resolver = module.get<LogsResolver>(LogsResolver);
|
||||
|
||||
@@ -3,13 +3,16 @@ import { Args, Int, Query, Resolver, Subscription } from '@nestjs/graphql';
|
||||
import { AuthAction, Resource } from '@unraid/shared/graphql.model.js';
|
||||
import { UsePermissions } from '@unraid/shared/use-permissions.directive.js';
|
||||
|
||||
import { createSubscription, PUBSUB_CHANNEL } from '@app/core/pubsub.js';
|
||||
import { LogFile, LogFileContent } from '@app/unraid-api/graph/resolvers/logs/logs.model.js';
|
||||
import { LogsService } from '@app/unraid-api/graph/resolvers/logs/logs.service.js';
|
||||
import { SubscriptionHelperService } from '@app/unraid-api/graph/services/subscription-helper.service.js';
|
||||
|
||||
@Resolver(() => LogFile)
|
||||
export class LogsResolver {
|
||||
constructor(private readonly logsService: LogsService) {}
|
||||
constructor(
|
||||
private readonly logsService: LogsService,
|
||||
private readonly subscriptionHelper: SubscriptionHelperService
|
||||
) {}
|
||||
|
||||
@Query(() => [LogFile])
|
||||
@UsePermissions({
|
||||
@@ -38,27 +41,12 @@ export class LogsResolver {
|
||||
action: AuthAction.READ_ANY,
|
||||
resource: Resource.LOGS,
|
||||
})
|
||||
async logFileSubscription(@Args('path') path: string) {
|
||||
// Start watching the file
|
||||
this.logsService.getLogFileSubscriptionChannel(path);
|
||||
logFileSubscription(@Args('path') path: string) {
|
||||
// Register the topic and get the key
|
||||
const topicKey = this.logsService.registerLogFileSubscription(path);
|
||||
|
||||
// Create the async iterator
|
||||
const asyncIterator = createSubscription(PUBSUB_CHANNEL.LOG_FILE);
|
||||
|
||||
// Store the original return method to wrap it
|
||||
const originalReturn = asyncIterator.return;
|
||||
|
||||
// Override the return method to clean up resources
|
||||
asyncIterator.return = async () => {
|
||||
// Stop watching the file when subscription ends
|
||||
this.logsService.stopWatchingLogFile(path);
|
||||
|
||||
// Call the original return method
|
||||
return originalReturn
|
||||
? originalReturn.call(asyncIterator)
|
||||
: Promise.resolve({ value: undefined, done: true });
|
||||
};
|
||||
|
||||
return asyncIterator;
|
||||
// Use the helper service to create a tracked subscription
|
||||
// This automatically handles subscribe/unsubscribe with reference counting
|
||||
return this.subscriptionHelper.createTrackedSubscription(topicKey);
|
||||
}
|
||||
}
|
||||
|
||||
201
api/src/unraid-api/graph/resolvers/logs/logs.service.spec.ts
Normal file
201
api/src/unraid-api/graph/resolvers/logs/logs.service.spec.ts
Normal file
@@ -0,0 +1,201 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
import * as fs from 'node:fs/promises';
|
||||
|
||||
import * as chokidar from 'chokidar';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { LogWatcherManager } from '@app/unraid-api/graph/resolvers/logs/log-watcher-manager.service.js';
|
||||
import { LogsService } from '@app/unraid-api/graph/resolvers/logs/logs.service.js';
|
||||
import { SubscriptionTrackerService } from '@app/unraid-api/graph/services/subscription-tracker.service.js';
|
||||
|
||||
vi.mock('node:fs/promises');
|
||||
vi.mock('chokidar');
|
||||
vi.mock('@app/store/index.js', () => ({
|
||||
getters: {
|
||||
paths: () => ({
|
||||
'unraid-log-base': '/var/log',
|
||||
}),
|
||||
},
|
||||
}));
|
||||
vi.mock('@app/core/pubsub.js', () => ({
|
||||
pubsub: {
|
||||
publish: vi.fn(),
|
||||
},
|
||||
PUBSUB_CHANNEL: {},
|
||||
}));
|
||||
|
||||
describe('LogsService', () => {
|
||||
let service: LogsService;
|
||||
let mockWatcher: any;
|
||||
let subscriptionTracker: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
// Create a mock watcher
|
||||
mockWatcher = {
|
||||
on: vi.fn(),
|
||||
close: vi.fn(),
|
||||
};
|
||||
|
||||
// Mock chokidar.watch to return our mock watcher
|
||||
vi.mocked(chokidar.watch).mockReturnValue(mockWatcher as any);
|
||||
|
||||
// Mock fs.stat to return a file size
|
||||
vi.mocked(fs.stat).mockResolvedValue({ size: 1000 } as any);
|
||||
|
||||
subscriptionTracker = {
|
||||
getSubscriberCount: vi.fn().mockReturnValue(0),
|
||||
registerTopic: vi.fn(),
|
||||
};
|
||||
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
LogsService,
|
||||
LogWatcherManager,
|
||||
{
|
||||
provide: SubscriptionTrackerService,
|
||||
useValue: subscriptionTracker,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<LogsService>(LogsService);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should be defined', () => {
|
||||
expect(service).toBeDefined();
|
||||
});
|
||||
|
||||
it('should handle race condition when stopping watcher during initialization', async () => {
|
||||
// Setup: Register the subscription which will trigger registerTopic
|
||||
service.registerLogFileSubscription('test.log');
|
||||
|
||||
// Get the onStart callback that was registered
|
||||
const registerTopicCall = subscriptionTracker.registerTopic.mock.calls[0];
|
||||
const onStartCallback = registerTopicCall[1];
|
||||
const onStopCallback = registerTopicCall[2];
|
||||
|
||||
// Create a promise to control when stat resolves
|
||||
let statResolve: any;
|
||||
const statPromise = new Promise((resolve) => {
|
||||
statResolve = resolve;
|
||||
});
|
||||
vi.mocked(fs.stat).mockReturnValue(statPromise as any);
|
||||
|
||||
// Start the watcher (this will call startWatchingLogFile internally)
|
||||
onStartCallback();
|
||||
|
||||
// At this point, the watcher should be marked as 'initializing'
|
||||
// Now call stop before the stat promise resolves
|
||||
onStopCallback();
|
||||
|
||||
// Now resolve the stat promise to complete initialization
|
||||
statResolve({ size: 1000 });
|
||||
|
||||
// Wait for any async operations to complete
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
|
||||
// The watcher should have been closed due to the race condition check
|
||||
expect(mockWatcher.close).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not leak watcher if stopped multiple times during initialization', async () => {
|
||||
// Setup: Register the subscription
|
||||
service.registerLogFileSubscription('test.log');
|
||||
|
||||
const registerTopicCall = subscriptionTracker.registerTopic.mock.calls[0];
|
||||
const onStartCallback = registerTopicCall[1];
|
||||
const onStopCallback = registerTopicCall[2];
|
||||
|
||||
// Create controlled stat promise
|
||||
let statResolve: any;
|
||||
const statPromise = new Promise((resolve) => {
|
||||
statResolve = resolve;
|
||||
});
|
||||
vi.mocked(fs.stat).mockReturnValue(statPromise as any);
|
||||
|
||||
// Start the watcher
|
||||
onStartCallback();
|
||||
|
||||
// Call stop multiple times during initialization
|
||||
onStopCallback();
|
||||
onStopCallback();
|
||||
onStopCallback();
|
||||
|
||||
// Complete initialization
|
||||
statResolve({ size: 1000 });
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
|
||||
// Should only close once
|
||||
expect(mockWatcher.close).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should properly handle normal start and stop without race condition', async () => {
|
||||
// Setup: Register the subscription
|
||||
service.registerLogFileSubscription('test.log');
|
||||
|
||||
const registerTopicCall = subscriptionTracker.registerTopic.mock.calls[0];
|
||||
const onStartCallback = registerTopicCall[1];
|
||||
const onStopCallback = registerTopicCall[2];
|
||||
|
||||
// Make stat resolve immediately
|
||||
vi.mocked(fs.stat).mockResolvedValue({ size: 1000 } as any);
|
||||
|
||||
// Start the watcher and let it complete initialization
|
||||
onStartCallback();
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
|
||||
// Watcher should be created but not closed
|
||||
expect(chokidar.watch).toHaveBeenCalled();
|
||||
expect(mockWatcher.close).not.toHaveBeenCalled();
|
||||
|
||||
// Now stop it normally
|
||||
onStopCallback();
|
||||
|
||||
// Watcher should be closed
|
||||
expect(mockWatcher.close).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should handle error during initialization without leaking watchers', async () => {
|
||||
// Setup: Register the subscription
|
||||
service.registerLogFileSubscription('test.log');
|
||||
|
||||
const registerTopicCall = subscriptionTracker.registerTopic.mock.calls[0];
|
||||
const onStartCallback = registerTopicCall[1];
|
||||
|
||||
// Make stat reject with an error
|
||||
vi.mocked(fs.stat).mockRejectedValue(new Error('File not found'));
|
||||
|
||||
// Start the watcher (should fail during initialization)
|
||||
onStartCallback();
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
|
||||
// Watcher should never be created due to stat error
|
||||
expect(chokidar.watch).not.toHaveBeenCalled();
|
||||
expect(mockWatcher.close).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not create duplicate watchers when started multiple times', async () => {
|
||||
// Setup: Register the subscription
|
||||
service.registerLogFileSubscription('test.log');
|
||||
|
||||
const registerTopicCall = subscriptionTracker.registerTopic.mock.calls[0];
|
||||
const onStartCallback = registerTopicCall[1];
|
||||
|
||||
// Make stat resolve immediately
|
||||
vi.mocked(fs.stat).mockResolvedValue({ size: 1000 } as any);
|
||||
|
||||
// Start the watcher multiple times
|
||||
onStartCallback();
|
||||
onStartCallback();
|
||||
onStartCallback();
|
||||
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
|
||||
// Should only create one watcher
|
||||
expect(chokidar.watch).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
@@ -1,13 +1,15 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { createReadStream } from 'node:fs';
|
||||
import { readdir, readFile, stat } from 'node:fs/promises';
|
||||
import { readdir, stat } from 'node:fs/promises';
|
||||
import { basename, join } from 'node:path';
|
||||
import { createInterface } from 'node:readline';
|
||||
|
||||
import * as chokidar from 'chokidar';
|
||||
|
||||
import { pubsub, PUBSUB_CHANNEL } from '@app/core/pubsub.js';
|
||||
import { pubsub } from '@app/core/pubsub.js';
|
||||
import { getters } from '@app/store/index.js';
|
||||
import { LogWatcherManager } from '@app/unraid-api/graph/resolvers/logs/log-watcher-manager.service.js';
|
||||
import { SubscriptionTrackerService } from '@app/unraid-api/graph/services/subscription-tracker.service.js';
|
||||
|
||||
interface LogFile {
|
||||
name: string;
|
||||
@@ -26,12 +28,13 @@ interface LogFileContent {
|
||||
@Injectable()
|
||||
export class LogsService {
|
||||
private readonly logger = new Logger(LogsService.name);
|
||||
private readonly logWatchers = new Map<
|
||||
string,
|
||||
{ watcher: chokidar.FSWatcher; position: number; subscriptionCount: number }
|
||||
>();
|
||||
private readonly DEFAULT_LINES = 100;
|
||||
|
||||
constructor(
|
||||
private readonly subscriptionTracker: SubscriptionTrackerService,
|
||||
private readonly watcherManager: LogWatcherManager
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Get the base path for log files
|
||||
*/
|
||||
@@ -111,135 +114,208 @@ export class LogsService {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the subscription channel for a log file
|
||||
* Register and get the topic key for a log file subscription
|
||||
* @param path Path to the log file
|
||||
* @returns The subscription topic key
|
||||
*/
|
||||
getLogFileSubscriptionChannel(path: string): PUBSUB_CHANNEL {
|
||||
registerLogFileSubscription(path: string): string {
|
||||
const normalizedPath = join(this.logBasePath, basename(path));
|
||||
const topicKey = this.getTopicKey(normalizedPath);
|
||||
|
||||
// Start watching the file if not already watching
|
||||
if (!this.logWatchers.has(normalizedPath)) {
|
||||
this.startWatchingLogFile(normalizedPath);
|
||||
} else {
|
||||
// Increment subscription count for existing watcher
|
||||
const watcher = this.logWatchers.get(normalizedPath);
|
||||
if (watcher) {
|
||||
watcher.subscriptionCount++;
|
||||
this.logger.debug(
|
||||
`Incremented subscription count for ${normalizedPath} to ${watcher.subscriptionCount}`
|
||||
);
|
||||
}
|
||||
// Register the topic if not already registered
|
||||
if (!this.subscriptionTracker.getSubscriberCount(topicKey)) {
|
||||
this.logger.debug(`Registering log file subscription topic: ${topicKey}`);
|
||||
|
||||
this.subscriptionTracker.registerTopic(
|
||||
topicKey,
|
||||
// onStart handler
|
||||
() => {
|
||||
this.logger.debug(`Starting log file watcher for topic: ${topicKey}`);
|
||||
this.startWatchingLogFile(normalizedPath);
|
||||
},
|
||||
// onStop handler
|
||||
() => {
|
||||
this.logger.debug(`Stopping log file watcher for topic: ${topicKey}`);
|
||||
this.stopWatchingLogFile(normalizedPath);
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
return PUBSUB_CHANNEL.LOG_FILE;
|
||||
return topicKey;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start watching a log file for changes using chokidar
|
||||
* @param path Path to the log file
|
||||
*/
|
||||
private async startWatchingLogFile(path: string): Promise<void> {
|
||||
try {
|
||||
// Get initial file size
|
||||
const stats = await stat(path);
|
||||
let position = stats.size;
|
||||
private startWatchingLogFile(path: string): void {
|
||||
const watcherKey = path;
|
||||
|
||||
// Create a watcher for the file using chokidar
|
||||
const watcher = chokidar.watch(path, {
|
||||
persistent: true,
|
||||
awaitWriteFinish: {
|
||||
stabilityThreshold: 300,
|
||||
pollInterval: 100,
|
||||
},
|
||||
});
|
||||
// Check if already watching or initializing
|
||||
if (this.watcherManager.isWatchingOrInitializing(watcherKey)) {
|
||||
this.logger.debug(`Already watching or initializing log file: ${watcherKey}`);
|
||||
return;
|
||||
}
|
||||
|
||||
watcher.on('change', async () => {
|
||||
try {
|
||||
const newStats = await stat(path);
|
||||
// Mark as initializing immediately to prevent race conditions
|
||||
this.watcherManager.setInitializing(watcherKey);
|
||||
|
||||
// If the file has grown
|
||||
if (newStats.size > position) {
|
||||
// Read only the new content
|
||||
const stream = createReadStream(path, {
|
||||
start: position,
|
||||
end: newStats.size - 1,
|
||||
});
|
||||
// Get initial file size and set up watcher
|
||||
stat(path)
|
||||
.then((stats) => {
|
||||
const position = stats.size;
|
||||
|
||||
let newContent = '';
|
||||
stream.on('data', (chunk) => {
|
||||
newContent += chunk.toString();
|
||||
});
|
||||
// Create a watcher for the file using chokidar
|
||||
const watcher = chokidar.watch(path, {
|
||||
persistent: true,
|
||||
awaitWriteFinish: {
|
||||
stabilityThreshold: 300,
|
||||
pollInterval: 100,
|
||||
},
|
||||
});
|
||||
|
||||
stream.on('end', () => {
|
||||
if (newContent) {
|
||||
pubsub.publish(PUBSUB_CHANNEL.LOG_FILE, {
|
||||
watcher.on('change', async () => {
|
||||
// Check if we're already processing a change event for this file
|
||||
if (!this.watcherManager.startProcessing(watcherKey)) {
|
||||
// Already processing, ignore this event
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const newStats = await stat(path);
|
||||
|
||||
// Get the current position
|
||||
const currentPosition = this.watcherManager.getPosition(watcherKey);
|
||||
if (currentPosition === undefined) {
|
||||
// Watcher was stopped or not active, ignore the event
|
||||
return;
|
||||
}
|
||||
|
||||
// If the file has grown
|
||||
if (newStats.size > currentPosition) {
|
||||
// Read only the new content
|
||||
const stream = createReadStream(path, {
|
||||
start: currentPosition,
|
||||
end: newStats.size - 1,
|
||||
});
|
||||
|
||||
let newContent = '';
|
||||
stream.on('data', (chunk) => {
|
||||
newContent += chunk.toString();
|
||||
});
|
||||
|
||||
stream.on('end', () => {
|
||||
try {
|
||||
if (newContent) {
|
||||
// Use topic-specific channel
|
||||
const topicKey = this.getTopicKey(path);
|
||||
pubsub.publish(topicKey, {
|
||||
logFile: {
|
||||
path,
|
||||
content: newContent,
|
||||
totalLines: 0, // We don't need to count lines for updates
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Update position for next read (while still holding the guard)
|
||||
this.watcherManager.updatePosition(watcherKey, newStats.size);
|
||||
} finally {
|
||||
// Clear the in-flight flag
|
||||
this.watcherManager.finishProcessing(watcherKey);
|
||||
}
|
||||
});
|
||||
|
||||
stream.on('error', (error) => {
|
||||
this.logger.error(`Error reading stream for ${path}: ${error}`);
|
||||
// Clear the in-flight flag on error
|
||||
this.watcherManager.finishProcessing(watcherKey);
|
||||
});
|
||||
} else if (newStats.size < currentPosition) {
|
||||
// File was truncated, reset position and read from beginning
|
||||
this.logger.debug(`File ${path} was truncated, resetting position`);
|
||||
|
||||
try {
|
||||
// Read the entire file content
|
||||
const content = await this.getLogFileContent(
|
||||
path,
|
||||
this.DEFAULT_LINES,
|
||||
undefined
|
||||
);
|
||||
|
||||
// Use topic-specific channel
|
||||
const topicKey = this.getTopicKey(path);
|
||||
pubsub.publish(topicKey, {
|
||||
logFile: {
|
||||
path,
|
||||
content: newContent,
|
||||
totalLines: 0, // We don't need to count lines for updates
|
||||
...content,
|
||||
},
|
||||
});
|
||||
|
||||
// Update position (while still holding the guard)
|
||||
this.watcherManager.updatePosition(watcherKey, newStats.size);
|
||||
} finally {
|
||||
// Clear the in-flight flag
|
||||
this.watcherManager.finishProcessing(watcherKey);
|
||||
}
|
||||
|
||||
// Update position for next read
|
||||
position = newStats.size;
|
||||
});
|
||||
} else if (newStats.size < position) {
|
||||
// File was truncated, reset position and read from beginning
|
||||
position = 0;
|
||||
this.logger.debug(`File ${path} was truncated, resetting position`);
|
||||
|
||||
// Read the entire file content
|
||||
const content = await this.getLogFileContent(path);
|
||||
|
||||
pubsub.publish(PUBSUB_CHANNEL.LOG_FILE, {
|
||||
logFile: content,
|
||||
});
|
||||
|
||||
position = newStats.size;
|
||||
} else {
|
||||
// File size unchanged, clear the in-flight flag
|
||||
this.watcherManager.finishProcessing(watcherKey);
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
this.logger.error(`Error processing file change for ${path}: ${error}`);
|
||||
// Clear the in-flight flag on error
|
||||
this.watcherManager.finishProcessing(watcherKey);
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
this.logger.error(`Error processing file change for ${path}: ${error}`);
|
||||
});
|
||||
|
||||
watcher.on('error', (error) => {
|
||||
this.logger.error(`Chokidar watcher error for ${path}: ${error}`);
|
||||
});
|
||||
|
||||
// Check if we were stopped during initialization and handle cleanup
|
||||
if (!this.watcherManager.handlePostInitialization(watcherKey, watcher, position)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Publish initial snapshot
|
||||
this.getLogFileContent(path, this.DEFAULT_LINES, undefined)
|
||||
.then((content) => {
|
||||
const topicKey = this.getTopicKey(path);
|
||||
pubsub.publish(topicKey, {
|
||||
logFile: {
|
||||
...content,
|
||||
},
|
||||
});
|
||||
})
|
||||
.catch((error) => {
|
||||
this.logger.error(`Error publishing initial log content for ${path}: ${error}`);
|
||||
});
|
||||
|
||||
this.logger.debug(`Started watching log file with chokidar: ${path}`);
|
||||
})
|
||||
.catch((error) => {
|
||||
this.logger.error(`Error setting up file watcher for ${path}: ${error}`);
|
||||
// Clean up the initializing entry on error
|
||||
this.watcherManager.removeEntry(watcherKey);
|
||||
});
|
||||
}
|
||||
|
||||
watcher.on('error', (error) => {
|
||||
this.logger.error(`Chokidar watcher error for ${path}: ${error}`);
|
||||
});
|
||||
|
||||
// Store the watcher and current position with initial subscription count of 1
|
||||
this.logWatchers.set(path, { watcher, position, subscriptionCount: 1 });
|
||||
|
||||
this.logger.debug(
|
||||
`Started watching log file with chokidar: ${path} (subscription count: 1)`
|
||||
);
|
||||
} catch (error: unknown) {
|
||||
this.logger.error(`Error setting up chokidar file watcher for ${path}: ${error}`);
|
||||
}
|
||||
/**
|
||||
* Get the topic key for a log file subscription
|
||||
* @param path Path to the log file (should already be normalized)
|
||||
* @returns The topic key
|
||||
*/
|
||||
private getTopicKey(path: string): string {
|
||||
// Assume path is already normalized (full path)
|
||||
return `LOG_FILE:${path}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop watching a log file
|
||||
* @param path Path to the log file
|
||||
*/
|
||||
public stopWatchingLogFile(path: string): void {
|
||||
const normalizedPath = join(this.logBasePath, basename(path));
|
||||
const watcher = this.logWatchers.get(normalizedPath);
|
||||
|
||||
if (watcher) {
|
||||
// Decrement subscription count
|
||||
watcher.subscriptionCount--;
|
||||
this.logger.debug(
|
||||
`Decremented subscription count for ${normalizedPath} to ${watcher.subscriptionCount}`
|
||||
);
|
||||
|
||||
// Only close the watcher when subscription count reaches 0
|
||||
if (watcher.subscriptionCount <= 0) {
|
||||
watcher.watcher.close();
|
||||
this.logWatchers.delete(normalizedPath);
|
||||
this.logger.debug(`Stopped watching log file: ${normalizedPath} (no more subscribers)`);
|
||||
}
|
||||
}
|
||||
private stopWatchingLogFile(path: string): void {
|
||||
this.watcherManager.stopWatcher(path);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -9,7 +9,7 @@ import { CpuService } from '@app/unraid-api/graph/resolvers/info/cpu/cpu.service
|
||||
import { MemoryService } from '@app/unraid-api/graph/resolvers/info/memory/memory.service.js';
|
||||
import { MetricsResolver } from '@app/unraid-api/graph/resolvers/metrics/metrics.resolver.js';
|
||||
import { SubscriptionHelperService } from '@app/unraid-api/graph/services/subscription-helper.service.js';
|
||||
import { SubscriptionPollingService } from '@app/unraid-api/graph/services/subscription-polling.service.js';
|
||||
import { SubscriptionManagerService } from '@app/unraid-api/graph/services/subscription-manager.service.js';
|
||||
import { SubscriptionTrackerService } from '@app/unraid-api/graph/services/subscription-tracker.service.js';
|
||||
|
||||
describe('MetricsResolver Integration Tests', () => {
|
||||
@@ -25,7 +25,7 @@ describe('MetricsResolver Integration Tests', () => {
|
||||
MemoryService,
|
||||
SubscriptionTrackerService,
|
||||
SubscriptionHelperService,
|
||||
SubscriptionPollingService,
|
||||
SubscriptionManagerService,
|
||||
],
|
||||
}).compile();
|
||||
|
||||
@@ -36,8 +36,8 @@ describe('MetricsResolver Integration Tests', () => {
|
||||
|
||||
afterEach(async () => {
|
||||
// Clean up polling service
|
||||
const pollingService = module.get<SubscriptionPollingService>(SubscriptionPollingService);
|
||||
pollingService.stopAll();
|
||||
const subscriptionManager = module.get<SubscriptionManagerService>(SubscriptionManagerService);
|
||||
subscriptionManager.stopAll();
|
||||
await module.close();
|
||||
});
|
||||
|
||||
@@ -202,10 +202,13 @@ describe('MetricsResolver Integration Tests', () => {
|
||||
it('should handle errors in CPU polling gracefully', async () => {
|
||||
const service = module.get<CpuService>(CpuService);
|
||||
const trackerService = module.get<SubscriptionTrackerService>(SubscriptionTrackerService);
|
||||
const pollingService = module.get<SubscriptionPollingService>(SubscriptionPollingService);
|
||||
const subscriptionManager =
|
||||
module.get<SubscriptionManagerService>(SubscriptionManagerService);
|
||||
|
||||
// Mock logger to capture error logs
|
||||
const loggerSpy = vi.spyOn(pollingService['logger'], 'error').mockImplementation(() => {});
|
||||
const loggerSpy = vi
|
||||
.spyOn(subscriptionManager['logger'], 'error')
|
||||
.mockImplementation(() => {});
|
||||
vi.spyOn(service, 'generateCpuLoad').mockRejectedValueOnce(new Error('CPU error'));
|
||||
|
||||
// Trigger polling
|
||||
@@ -215,7 +218,7 @@ describe('MetricsResolver Integration Tests', () => {
|
||||
await new Promise((resolve) => setTimeout(resolve, 1100));
|
||||
|
||||
expect(loggerSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Error in polling task'),
|
||||
expect.stringContaining('Error in subscription callback'),
|
||||
expect.any(Error)
|
||||
);
|
||||
|
||||
@@ -226,10 +229,13 @@ describe('MetricsResolver Integration Tests', () => {
|
||||
it('should handle errors in memory polling gracefully', async () => {
|
||||
const service = module.get<MemoryService>(MemoryService);
|
||||
const trackerService = module.get<SubscriptionTrackerService>(SubscriptionTrackerService);
|
||||
const pollingService = module.get<SubscriptionPollingService>(SubscriptionPollingService);
|
||||
const subscriptionManager =
|
||||
module.get<SubscriptionManagerService>(SubscriptionManagerService);
|
||||
|
||||
// Mock logger to capture error logs
|
||||
const loggerSpy = vi.spyOn(pollingService['logger'], 'error').mockImplementation(() => {});
|
||||
const loggerSpy = vi
|
||||
.spyOn(subscriptionManager['logger'], 'error')
|
||||
.mockImplementation(() => {});
|
||||
vi.spyOn(service, 'generateMemoryLoad').mockRejectedValueOnce(new Error('Memory error'));
|
||||
|
||||
// Trigger polling
|
||||
@@ -239,7 +245,7 @@ describe('MetricsResolver Integration Tests', () => {
|
||||
await new Promise((resolve) => setTimeout(resolve, 2100));
|
||||
|
||||
expect(loggerSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Error in polling task'),
|
||||
expect.stringContaining('Error in subscription callback'),
|
||||
expect.any(Error)
|
||||
);
|
||||
|
||||
@@ -251,22 +257,30 @@ describe('MetricsResolver Integration Tests', () => {
|
||||
describe('Polling cleanup on module destroy', () => {
|
||||
it('should clean up timers when module is destroyed', async () => {
|
||||
const trackerService = module.get<SubscriptionTrackerService>(SubscriptionTrackerService);
|
||||
const pollingService = module.get<SubscriptionPollingService>(SubscriptionPollingService);
|
||||
const subscriptionManager =
|
||||
module.get<SubscriptionManagerService>(SubscriptionManagerService);
|
||||
|
||||
// Start polling
|
||||
trackerService.subscribe(PUBSUB_CHANNEL.CPU_UTILIZATION);
|
||||
trackerService.subscribe(PUBSUB_CHANNEL.MEMORY_UTILIZATION);
|
||||
|
||||
// Verify polling is active
|
||||
expect(pollingService.isPolling(PUBSUB_CHANNEL.CPU_UTILIZATION)).toBe(true);
|
||||
expect(pollingService.isPolling(PUBSUB_CHANNEL.MEMORY_UTILIZATION)).toBe(true);
|
||||
// Wait a bit for subscriptions to be fully set up
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
// Verify subscriptions are active
|
||||
expect(subscriptionManager.isSubscriptionActive(PUBSUB_CHANNEL.CPU_UTILIZATION)).toBe(true);
|
||||
expect(subscriptionManager.isSubscriptionActive(PUBSUB_CHANNEL.MEMORY_UTILIZATION)).toBe(
|
||||
true
|
||||
);
|
||||
|
||||
// Clean up the module
|
||||
await module.close();
|
||||
|
||||
// Timers should be cleaned up
|
||||
expect(pollingService.isPolling(PUBSUB_CHANNEL.CPU_UTILIZATION)).toBe(false);
|
||||
expect(pollingService.isPolling(PUBSUB_CHANNEL.MEMORY_UTILIZATION)).toBe(false);
|
||||
// Subscriptions should be cleaned up
|
||||
expect(subscriptionManager.isSubscriptionActive(PUBSUB_CHANNEL.CPU_UTILIZATION)).toBe(false);
|
||||
expect(subscriptionManager.isSubscriptionActive(PUBSUB_CHANNEL.MEMORY_UTILIZATION)).toBe(
|
||||
false
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -32,6 +32,8 @@ describe('MetricsResolver', () => {
|
||||
loadNice: 0,
|
||||
loadIdle: 70.0,
|
||||
loadIrq: 0,
|
||||
loadGuest: 0,
|
||||
loadSteal: 0,
|
||||
},
|
||||
{
|
||||
load: 21.0,
|
||||
@@ -40,6 +42,8 @@ describe('MetricsResolver', () => {
|
||||
loadNice: 0,
|
||||
loadIdle: 79.0,
|
||||
loadIrq: 0,
|
||||
loadGuest: 0,
|
||||
loadSteal: 0,
|
||||
},
|
||||
],
|
||||
}),
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { AuthModule } from '@app/unraid-api/auth/auth.module.js';
|
||||
import { ApiConfigModule } from '@app/unraid-api/config/api-config.module.js';
|
||||
import { ApiKeyModule } from '@app/unraid-api/graph/resolvers/api-key/api-key.module.js';
|
||||
import { ApiKeyResolver } from '@app/unraid-api/graph/resolvers/api-key/api-key.resolver.js';
|
||||
import { ArrayModule } from '@app/unraid-api/graph/resolvers/array/array.module.js';
|
||||
@@ -11,8 +12,7 @@ import { DockerModule } from '@app/unraid-api/graph/resolvers/docker/docker.modu
|
||||
import { FlashBackupModule } from '@app/unraid-api/graph/resolvers/flash-backup/flash-backup.module.js';
|
||||
import { FlashResolver } from '@app/unraid-api/graph/resolvers/flash/flash.resolver.js';
|
||||
import { InfoModule } from '@app/unraid-api/graph/resolvers/info/info.module.js';
|
||||
import { LogsResolver } from '@app/unraid-api/graph/resolvers/logs/logs.resolver.js';
|
||||
import { LogsService } from '@app/unraid-api/graph/resolvers/logs/logs.service.js';
|
||||
import { LogsModule } from '@app/unraid-api/graph/resolvers/logs/logs.module.js';
|
||||
import { MetricsModule } from '@app/unraid-api/graph/resolvers/metrics/metrics.module.js';
|
||||
import { RootMutationsResolver } from '@app/unraid-api/graph/resolvers/mutation/mutation.resolver.js';
|
||||
import { NotificationsResolver } from '@app/unraid-api/graph/resolvers/notifications/notifications.resolver.js';
|
||||
@@ -39,12 +39,14 @@ import { MeResolver } from '@app/unraid-api/graph/user/user.resolver.js';
|
||||
ServicesModule,
|
||||
ArrayModule,
|
||||
ApiKeyModule,
|
||||
ApiConfigModule,
|
||||
AuthModule,
|
||||
CustomizationModule,
|
||||
DockerModule,
|
||||
DisksModule,
|
||||
FlashBackupModule,
|
||||
InfoModule,
|
||||
LogsModule,
|
||||
RCloneModule,
|
||||
SettingsModule,
|
||||
SsoModule,
|
||||
@@ -54,8 +56,6 @@ import { MeResolver } from '@app/unraid-api/graph/user/user.resolver.js';
|
||||
providers: [
|
||||
ConfigResolver,
|
||||
FlashResolver,
|
||||
LogsResolver,
|
||||
LogsService,
|
||||
MeResolver,
|
||||
NotificationsResolver,
|
||||
NotificationsService,
|
||||
|
||||
@@ -38,7 +38,9 @@ export class Server extends Node {
|
||||
@Field()
|
||||
name!: string;
|
||||
|
||||
@Field(() => ServerStatus)
|
||||
@Field(() => ServerStatus, {
|
||||
description: 'Whether this server is online or offline',
|
||||
})
|
||||
status!: ServerStatus;
|
||||
|
||||
@Field()
|
||||
|
||||
@@ -24,7 +24,7 @@ export class ServerResolver {
|
||||
resource: Resource.SERVERS,
|
||||
})
|
||||
public async server(): Promise<ServerModel | null> {
|
||||
return this.getLocalServer()[0] || null;
|
||||
return this.getLocalServer() || null;
|
||||
}
|
||||
|
||||
@Query(() => [ServerModel])
|
||||
@@ -33,7 +33,7 @@ export class ServerResolver {
|
||||
resource: Resource.SERVERS,
|
||||
})
|
||||
public async servers(): Promise<ServerModel[]> {
|
||||
return this.getLocalServer();
|
||||
return [this.getLocalServer()];
|
||||
}
|
||||
|
||||
@Subscription(() => ServerModel)
|
||||
@@ -45,7 +45,7 @@ export class ServerResolver {
|
||||
return createSubscription(PUBSUB_CHANNEL.SERVERS);
|
||||
}
|
||||
|
||||
private getLocalServer(): ServerModel[] {
|
||||
private getLocalServer(): ServerModel {
|
||||
const emhttp = getters.emhttp();
|
||||
const connectConfig = this.configService.get('connect');
|
||||
|
||||
@@ -64,22 +64,17 @@ export class ServerResolver {
|
||||
avatar: '',
|
||||
};
|
||||
|
||||
return [
|
||||
{
|
||||
id: 'local',
|
||||
owner,
|
||||
guid: guid || '',
|
||||
apikey: connectConfig?.config?.apikey ?? '',
|
||||
name: name ?? 'Local Server',
|
||||
status:
|
||||
connectConfig?.mothership?.status === MinigraphStatus.CONNECTED
|
||||
? ServerStatus.ONLINE
|
||||
: ServerStatus.OFFLINE,
|
||||
wanip,
|
||||
lanip,
|
||||
localurl,
|
||||
remoteurl,
|
||||
},
|
||||
];
|
||||
return {
|
||||
id: 'local',
|
||||
owner,
|
||||
guid: guid || '',
|
||||
apikey: connectConfig?.config?.apikey ?? '',
|
||||
name: name ?? 'Local Server',
|
||||
status: ServerStatus.ONLINE,
|
||||
wanip,
|
||||
lanip,
|
||||
localurl,
|
||||
remoteurl,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,8 +16,8 @@ import {
|
||||
} from '@app/unraid-api/graph/resolvers/settings/settings.model.js';
|
||||
import { ApiSettings } from '@app/unraid-api/graph/resolvers/settings/settings.service.js';
|
||||
import { SsoSettings } from '@app/unraid-api/graph/resolvers/settings/sso-settings.model.js';
|
||||
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/oidc-config.service.js';
|
||||
import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/oidc-provider.model.js';
|
||||
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/oidc-config.service.js';
|
||||
import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';
|
||||
|
||||
@Resolver(() => Settings)
|
||||
export class SettingsResolver {
|
||||
|
||||
@@ -7,7 +7,7 @@ import { type ApiConfig } from '@unraid/shared/services/api-config.js';
|
||||
import { UserSettingsService } from '@unraid/shared/services/user-settings.js';
|
||||
import { execa } from 'execa';
|
||||
|
||||
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/oidc-config.service.js';
|
||||
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/oidc-config.service.js';
|
||||
import { createLabeledControl } from '@app/unraid-api/graph/utils/form-utils.js';
|
||||
import { SettingSlice } from '@app/unraid-api/types/json-forms.js';
|
||||
|
||||
|
||||
@@ -0,0 +1,11 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { OidcAuthorizationService } from '@app/unraid-api/graph/resolvers/sso/auth/oidc-authorization.service.js';
|
||||
import { OidcClaimsService } from '@app/unraid-api/graph/resolvers/sso/auth/oidc-claims.service.js';
|
||||
import { OidcTokenExchangeService } from '@app/unraid-api/graph/resolvers/sso/auth/oidc-token-exchange.service.js';
|
||||
|
||||
@Module({
|
||||
providers: [OidcAuthorizationService, OidcTokenExchangeService, OidcClaimsService],
|
||||
exports: [OidcAuthorizationService, OidcTokenExchangeService, OidcClaimsService],
|
||||
})
|
||||
export class OidcAuthModule {}
|
||||
@@ -1,70 +1,26 @@
|
||||
import { UnauthorizedException } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import * as client from 'openid-client';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { OidcAuthService } from '@app/unraid-api/graph/resolvers/sso/oidc-auth.service.js';
|
||||
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/oidc-config.service.js';
|
||||
import { OidcAuthorizationService } from '@app/unraid-api/graph/resolvers/sso/auth/oidc-authorization.service.js';
|
||||
import {
|
||||
AuthorizationOperator,
|
||||
AuthorizationRuleMode,
|
||||
OidcAuthorizationRule,
|
||||
OidcProvider,
|
||||
} from '@app/unraid-api/graph/resolvers/sso/oidc-provider.model.js';
|
||||
import { OidcSessionService } from '@app/unraid-api/graph/resolvers/sso/oidc-session.service.js';
|
||||
import { OidcStateService } from '@app/unraid-api/graph/resolvers/sso/oidc-state.service.js';
|
||||
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/oidc-validation.service.js';
|
||||
} from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';
|
||||
|
||||
describe('OidcAuthService', () => {
|
||||
let service: OidcAuthService;
|
||||
let oidcConfig: any;
|
||||
let sessionService: any;
|
||||
let configService: any;
|
||||
let stateService: any;
|
||||
let validationService: any;
|
||||
describe('OidcAuthorizationService', () => {
|
||||
let service: OidcAuthorizationService;
|
||||
let module: TestingModule;
|
||||
|
||||
beforeEach(async () => {
|
||||
module = await Test.createTestingModule({
|
||||
providers: [
|
||||
OidcAuthService,
|
||||
{
|
||||
provide: ConfigService,
|
||||
useValue: {
|
||||
get: vi.fn(),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: OidcConfigPersistence,
|
||||
useValue: {
|
||||
getProvider: vi.fn(),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: OidcSessionService,
|
||||
useValue: {
|
||||
createSession: vi.fn(),
|
||||
},
|
||||
},
|
||||
OidcStateService,
|
||||
{
|
||||
provide: OidcValidationService,
|
||||
useValue: {
|
||||
validateProvider: vi.fn(),
|
||||
performDiscovery: vi.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
providers: [OidcAuthorizationService],
|
||||
}).compile();
|
||||
|
||||
service = module.get<OidcAuthService>(OidcAuthService);
|
||||
oidcConfig = module.get(OidcConfigPersistence);
|
||||
sessionService = module.get(OidcSessionService);
|
||||
configService = module.get(ConfigService);
|
||||
stateService = module.get(OidcStateService);
|
||||
validationService = module.get<OidcValidationService>(OidcValidationService);
|
||||
service = module.get<OidcAuthorizationService>(OidcAuthorizationService);
|
||||
});
|
||||
|
||||
describe('Authorization Rule Evaluation', () => {
|
||||
@@ -1189,467 +1145,4 @@ describe('OidcAuthService', () => {
|
||||
).resolves.toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Manual Configuration (No Discovery)', () => {
|
||||
it('should create manual configuration when discovery fails but manual endpoints are provided', async () => {
|
||||
const provider: OidcProvider = {
|
||||
id: 'manual-provider',
|
||||
name: 'Manual Provider',
|
||||
clientId: 'test-client-id',
|
||||
clientSecret: 'test-client-secret',
|
||||
issuer: 'https://manual.example.com',
|
||||
authorizationEndpoint: 'https://manual.example.com/auth',
|
||||
tokenEndpoint: 'https://manual.example.com/token',
|
||||
jwksUri: 'https://manual.example.com/jwks',
|
||||
scopes: ['openid', 'profile'],
|
||||
authorizationRules: [],
|
||||
};
|
||||
|
||||
oidcConfig.getProvider.mockResolvedValue(provider);
|
||||
|
||||
// Mock discovery to fail
|
||||
validationService.performDiscovery = vi
|
||||
.fn()
|
||||
.mockRejectedValue(new Error('Discovery failed'));
|
||||
|
||||
// Access the private method
|
||||
const getOrCreateConfig = async (provider: OidcProvider) => {
|
||||
return (service as any).getOrCreateConfig(provider);
|
||||
};
|
||||
|
||||
const config = await getOrCreateConfig(provider);
|
||||
|
||||
// Verify the configuration was created with the correct endpoints
|
||||
expect(config).toBeDefined();
|
||||
expect(config.serverMetadata().authorization_endpoint).toBe(
|
||||
'https://manual.example.com/auth'
|
||||
);
|
||||
expect(config.serverMetadata().token_endpoint).toBe('https://manual.example.com/token');
|
||||
expect(config.serverMetadata().jwks_uri).toBe('https://manual.example.com/jwks');
|
||||
expect(config.serverMetadata().issuer).toBe('https://manual.example.com');
|
||||
});
|
||||
|
||||
it('should create manual configuration with fallback issuer when not provided', async () => {
|
||||
const provider: OidcProvider = {
|
||||
id: 'manual-provider-no-issuer',
|
||||
name: 'Manual Provider No Issuer',
|
||||
clientId: 'test-client-id',
|
||||
clientSecret: 'test-client-secret',
|
||||
issuer: '', // Empty issuer should skip discovery and use manual endpoints
|
||||
authorizationEndpoint: 'https://manual.example.com/auth',
|
||||
tokenEndpoint: 'https://manual.example.com/token',
|
||||
scopes: ['openid', 'profile'],
|
||||
authorizationRules: [],
|
||||
};
|
||||
|
||||
oidcConfig.getProvider.mockResolvedValue(provider);
|
||||
|
||||
// No need to mock discovery since it won't be called with empty issuer
|
||||
|
||||
// Access the private method
|
||||
const getOrCreateConfig = async (provider: OidcProvider) => {
|
||||
return (service as any).getOrCreateConfig(provider);
|
||||
};
|
||||
|
||||
const config = await getOrCreateConfig(provider);
|
||||
|
||||
// Verify the configuration was created with fallback issuer
|
||||
expect(config).toBeDefined();
|
||||
expect(config.serverMetadata().issuer).toBe('manual-manual-provider-no-issuer');
|
||||
expect(config.serverMetadata().authorization_endpoint).toBe(
|
||||
'https://manual.example.com/auth'
|
||||
);
|
||||
expect(config.serverMetadata().token_endpoint).toBe('https://manual.example.com/token');
|
||||
});
|
||||
|
||||
it('should handle manual configuration with client secret properly', async () => {
|
||||
const provider: OidcProvider = {
|
||||
id: 'manual-with-secret',
|
||||
name: 'Manual With Secret',
|
||||
clientId: 'test-client-id',
|
||||
clientSecret: 'secret-123',
|
||||
issuer: 'https://manual.example.com',
|
||||
authorizationEndpoint: 'https://manual.example.com/auth',
|
||||
tokenEndpoint: 'https://manual.example.com/token',
|
||||
scopes: ['openid', 'profile'],
|
||||
authorizationRules: [],
|
||||
};
|
||||
|
||||
oidcConfig.getProvider.mockResolvedValue(provider);
|
||||
|
||||
// Mock discovery to fail
|
||||
validationService.performDiscovery = vi
|
||||
.fn()
|
||||
.mockRejectedValue(new Error('Discovery failed'));
|
||||
|
||||
// Access the private method
|
||||
const getOrCreateConfig = async (provider: OidcProvider) => {
|
||||
return (service as any).getOrCreateConfig(provider);
|
||||
};
|
||||
|
||||
const config = await getOrCreateConfig(provider);
|
||||
|
||||
// Verify configuration was created successfully
|
||||
expect(config).toBeDefined();
|
||||
expect(config.clientMetadata().client_secret).toBe('secret-123');
|
||||
});
|
||||
|
||||
it('should handle manual configuration without client secret (public client)', async () => {
|
||||
const provider: OidcProvider = {
|
||||
id: 'manual-public-client',
|
||||
name: 'Manual Public Client',
|
||||
clientId: 'public-client-id',
|
||||
// No client secret
|
||||
issuer: 'https://manual.example.com',
|
||||
authorizationEndpoint: 'https://manual.example.com/auth',
|
||||
tokenEndpoint: 'https://manual.example.com/token',
|
||||
scopes: ['openid', 'profile'],
|
||||
authorizationRules: [],
|
||||
};
|
||||
|
||||
oidcConfig.getProvider.mockResolvedValue(provider);
|
||||
|
||||
// Mock discovery to fail
|
||||
validationService.performDiscovery = vi
|
||||
.fn()
|
||||
.mockRejectedValue(new Error('Discovery failed'));
|
||||
|
||||
// Access the private method
|
||||
const getOrCreateConfig = async (provider: OidcProvider) => {
|
||||
return (service as any).getOrCreateConfig(provider);
|
||||
};
|
||||
|
||||
const config = await getOrCreateConfig(provider);
|
||||
|
||||
// Verify configuration was created successfully for public client
|
||||
expect(config).toBeDefined();
|
||||
expect(config.clientMetadata().client_secret).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should throw error when discovery fails and no manual endpoints provided', async () => {
|
||||
const provider: OidcProvider = {
|
||||
id: 'no-manual-endpoints',
|
||||
name: 'No Manual Endpoints',
|
||||
clientId: 'test-client-id',
|
||||
issuer: 'https://broken.example.com',
|
||||
// Missing authorizationEndpoint and tokenEndpoint
|
||||
scopes: ['openid', 'profile'],
|
||||
authorizationRules: [],
|
||||
};
|
||||
|
||||
oidcConfig.getProvider.mockResolvedValue(provider);
|
||||
|
||||
// Mock discovery to fail
|
||||
validationService.performDiscovery = vi
|
||||
.fn()
|
||||
.mockRejectedValue(new Error('Discovery failed'));
|
||||
|
||||
// Access the private method
|
||||
const getOrCreateConfig = async (provider: OidcProvider) => {
|
||||
return (service as any).getOrCreateConfig(provider);
|
||||
};
|
||||
|
||||
await expect(getOrCreateConfig(provider)).rejects.toThrow(UnauthorizedException);
|
||||
});
|
||||
|
||||
it('should throw error when only authorization endpoint is provided', async () => {
|
||||
const provider: OidcProvider = {
|
||||
id: 'partial-manual-endpoints',
|
||||
name: 'Partial Manual Endpoints',
|
||||
clientId: 'test-client-id',
|
||||
issuer: 'https://broken.example.com',
|
||||
authorizationEndpoint: 'https://manual.example.com/auth',
|
||||
// Missing tokenEndpoint
|
||||
scopes: ['openid', 'profile'],
|
||||
authorizationRules: [],
|
||||
};
|
||||
|
||||
oidcConfig.getProvider.mockResolvedValue(provider);
|
||||
|
||||
// Mock discovery to fail
|
||||
validationService.performDiscovery = vi
|
||||
.fn()
|
||||
.mockRejectedValue(new Error('Discovery failed'));
|
||||
|
||||
// Access the private method
|
||||
const getOrCreateConfig = async (provider: OidcProvider) => {
|
||||
return (service as any).getOrCreateConfig(provider);
|
||||
};
|
||||
|
||||
await expect(getOrCreateConfig(provider)).rejects.toThrow(UnauthorizedException);
|
||||
});
|
||||
|
||||
it('should cache manual configuration properly', async () => {
|
||||
const provider: OidcProvider = {
|
||||
id: 'cache-test',
|
||||
name: 'Cache Test',
|
||||
clientId: 'test-client-id',
|
||||
clientSecret: 'test-secret',
|
||||
issuer: 'https://manual.example.com',
|
||||
authorizationEndpoint: 'https://manual.example.com/auth',
|
||||
tokenEndpoint: 'https://manual.example.com/token',
|
||||
scopes: ['openid', 'profile'],
|
||||
authorizationRules: [],
|
||||
};
|
||||
|
||||
oidcConfig.getProvider.mockResolvedValue(provider);
|
||||
|
||||
// Mock discovery to fail
|
||||
validationService.performDiscovery = vi
|
||||
.fn()
|
||||
.mockRejectedValue(new Error('Discovery failed'));
|
||||
|
||||
// Access the private method
|
||||
const getOrCreateConfig = async (provider: OidcProvider) => {
|
||||
return (service as any).getOrCreateConfig(provider);
|
||||
};
|
||||
|
||||
// First call should create configuration
|
||||
const config1 = await getOrCreateConfig(provider);
|
||||
|
||||
// Second call should return cached configuration
|
||||
const config2 = await getOrCreateConfig(provider);
|
||||
|
||||
expect(config1).toBe(config2); // Should be the exact same instance
|
||||
expect(validationService.performDiscovery).toHaveBeenCalledTimes(1); // Only called once due to caching
|
||||
});
|
||||
|
||||
it('should handle HTTP endpoints with allowInsecureRequests', async () => {
|
||||
const provider: OidcProvider = {
|
||||
id: 'http-endpoints',
|
||||
name: 'HTTP Endpoints',
|
||||
clientId: 'test-client-id',
|
||||
clientSecret: 'test-secret',
|
||||
issuer: 'http://manual.example.com', // HTTP instead of HTTPS
|
||||
authorizationEndpoint: 'http://manual.example.com/auth',
|
||||
tokenEndpoint: 'http://manual.example.com/token',
|
||||
scopes: ['openid', 'profile'],
|
||||
authorizationRules: [],
|
||||
};
|
||||
|
||||
oidcConfig.getProvider.mockResolvedValue(provider);
|
||||
|
||||
// Mock discovery to fail
|
||||
validationService.performDiscovery = vi
|
||||
.fn()
|
||||
.mockRejectedValue(new Error('Discovery failed'));

            // Access the private method
            const getOrCreateConfig = async (provider: OidcProvider) => {
                return (service as any).getOrCreateConfig(provider);
            };

            const config = await getOrCreateConfig(provider);

            // Verify configuration was created successfully even with HTTP
            expect(config).toBeDefined();
            expect(config.serverMetadata().token_endpoint).toBe('http://manual.example.com/token');
            expect(config.serverMetadata().authorization_endpoint).toBe(
                'http://manual.example.com/auth'
            );
        });
    });

    describe('getAuthorizationUrl', () => {
        it('should generate authorization URL with custom authorization endpoint', async () => {
            const provider: OidcProvider = {
                id: 'test-provider',
                name: 'Test Provider',
                clientId: 'test-client-id',
                issuer: 'https://example.com',
                authorizationEndpoint: 'https://custom.example.com/auth',
                scopes: ['openid', 'profile'],
                authorizationRules: [],
            };

            oidcConfig.getProvider.mockResolvedValue(provider);

            const authUrl = await service.getAuthorizationUrl(
                'test-provider',
                'test-state',
                'localhost:3001'
            );

            expect(authUrl).toContain('https://custom.example.com/auth');
            expect(authUrl).toContain('client_id=test-client-id');
            expect(authUrl).toContain('response_type=code');
            expect(authUrl).toContain('scope=openid+profile');
            // State should start with provider ID followed by secure state token
            expect(authUrl).toMatch(/state=test-provider%3A[a-f0-9]+\.[0-9]+\.[a-f0-9]+/);
            expect(authUrl).toContain('redirect_uri=');
        });

        it('should encode provider ID in state parameter', async () => {
            const provider: OidcProvider = {
                id: 'encode-test-provider',
                name: 'Encode Test Provider',
                clientId: 'test-client-id',
                issuer: 'https://example.com',
                authorizationEndpoint: 'https://example.com/auth',
                scopes: ['openid', 'email'],
                authorizationRules: [],
            };

            oidcConfig.getProvider.mockResolvedValue(provider);

            const authUrl = await service.getAuthorizationUrl('encode-test-provider', 'original-state');

            // Verify that the state parameter includes provider ID at the start
            expect(authUrl).toMatch(/state=encode-test-provider%3A[a-f0-9]+\.[0-9]+\.[a-f0-9]+/);
        });

        it('should throw error when provider not found', async () => {
            oidcConfig.getProvider.mockResolvedValue(null);

            await expect(
                service.getAuthorizationUrl('nonexistent-provider', 'test-state')
            ).rejects.toThrow('Provider nonexistent-provider not found');
        });

        it('should handle custom scopes properly', async () => {
            const provider: OidcProvider = {
                id: 'custom-scopes-provider',
                name: 'Custom Scopes Provider',
                clientId: 'test-client-id',
                issuer: 'https://example.com',
                authorizationEndpoint: 'https://example.com/auth',
                scopes: ['openid', 'profile', 'groups', 'custom:scope'],
                authorizationRules: [],
            };

            oidcConfig.getProvider.mockResolvedValue(provider);

            const authUrl = await service.getAuthorizationUrl('custom-scopes-provider', 'test-state');

            expect(authUrl).toContain('scope=openid+profile+groups+custom%3Ascope');
        });
    });

    describe('handleCallback', () => {
        it('should throw error when provider not found in callback', async () => {
            oidcConfig.getProvider.mockResolvedValue(null);

            await expect(
                service.handleCallback('nonexistent-provider', 'code', 'redirect-uri')
            ).rejects.toThrow('Provider nonexistent-provider not found');
        });

        it('should handle malformed state parameter', async () => {
            await expect(
                service.handleCallback('invalid-state', 'code', 'redirect-uri')
            ).rejects.toThrow(UnauthorizedException);
        });

        it('should call getProvider with the provided provider ID', async () => {
            const provider: OidcProvider = {
                id: 'test-provider',
                name: 'Test Provider',
                clientId: 'test-client-id',
                issuer: 'https://example.com',
                scopes: ['openid'],
                authorizationRules: [],
            };

            oidcConfig.getProvider.mockResolvedValue(provider);

            // This will fail during token exchange, but we're testing the provider lookup logic
            await expect(
                service.handleCallback('test-provider', 'code', 'redirect-uri')
            ).rejects.toThrow(UnauthorizedException);

            // Verify the provider was looked up with the correct ID
            expect(oidcConfig.getProvider).toHaveBeenCalledWith('test-provider');
        });
    });

    describe('validateProvider', () => {
        it('should delegate to validation service and return result', async () => {
            const provider: OidcProvider = {
                id: 'validate-provider',
                name: 'Validate Provider',
                clientId: 'test-client-id',
                issuer: 'https://example.com',
                scopes: ['openid'],
                authorizationRules: [],
            };

            const expectedResult = {
                isValid: true,
                authorizationEndpoint: 'https://example.com/auth',
                tokenEndpoint: 'https://example.com/token',
            };

            validationService.validateProvider.mockResolvedValue(expectedResult);

            const result = await service.validateProvider(provider);

            expect(result).toEqual(expectedResult);
            expect(validationService.validateProvider).toHaveBeenCalledWith(provider);
        });

        it('should clear config cache before validation', async () => {
            const provider: OidcProvider = {
                id: 'cache-clear-provider',
                name: 'Cache Clear Provider',
                clientId: 'test-client-id',
                issuer: 'https://example.com',
                scopes: ['openid'],
                authorizationRules: [],
            };

            const expectedResult = {
                isValid: false,
                error: 'Validation failed',
            };

            validationService.validateProvider.mockResolvedValue(expectedResult);

            const result = await service.validateProvider(provider);

            expect(result).toEqual(expectedResult);
            // Verify the cache was cleared by checking the method was called
            expect(validationService.validateProvider).toHaveBeenCalledWith(provider);
        });
    });

    describe('getRedirectUri (private method)', () => {
        it('should generate correct redirect URI with localhost (development)', () => {
            const getRedirectUri = (service as any).getRedirectUri.bind(service);
            const redirectUri = getRedirectUri('http://localhost:3000');

            expect(redirectUri).toBe('http://localhost:3000/graphql/api/auth/oidc/callback');
        });

        it('should generate correct redirect URI with non-localhost host', () => {
            const getRedirectUri = (service as any).getRedirectUri.bind(service);
            const redirectUri = getRedirectUri('https://example.com');

            expect(redirectUri).toBe('https://example.com/graphql/api/auth/oidc/callback');
        });

        it('should handle HTTP protocol for non-localhost hosts', () => {
            const getRedirectUri = (service as any).getRedirectUri.bind(service);
            const redirectUri = getRedirectUri('http://tower.local');

            expect(redirectUri).toBe('http://tower.local/graphql/api/auth/oidc/callback');
        });

        it('should handle non-standard ports correctly', () => {
            const getRedirectUri = (service as any).getRedirectUri.bind(service);
            const redirectUri = getRedirectUri('http://example.com:8080');

            expect(redirectUri).toBe('http://example.com:8080/graphql/api/auth/oidc/callback');
        });

        it('should use default redirect URI when no request host provided', () => {
            const getRedirectUri = (service as any).getRedirectUri.bind(service);

            // Mock the ConfigService to return a default value
            configService.get.mockReturnValue('http://tower.local');

            const redirectUri = getRedirectUri();

            expect(redirectUri).toBe('http://tower.local/graphql/api/auth/oidc/callback');
        });
    });
});
@@ -0,0 +1,170 @@
import { Injectable, Logger, UnauthorizedException } from '@nestjs/common';

import {
    AuthorizationOperator,
    AuthorizationRuleMode,
    OidcAuthorizationRule,
    OidcProvider,
} from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';

interface JwtClaims {
    sub?: string;
    email?: string;
    name?: string;
    hd?: string; // Google hosted domain
    [claim: string]: unknown;
}

@Injectable()
export class OidcAuthorizationService {
    private readonly logger = new Logger(OidcAuthorizationService.name);

    /**
     * Check authorization based on rules
     * This will throw a helpful error if misconfigured or unauthorized
     */
    async checkAuthorization(provider: OidcProvider, claims: JwtClaims): Promise<void> {
        this.logger.debug(
            `Checking authorization for provider ${provider.id} with ${provider.authorizationRules?.length || 0} rules`
        );
        this.logger.debug(`Available claims: ${Object.keys(claims).join(', ')}`);
        this.logger.debug(
            `Authorization rule mode: ${provider.authorizationRuleMode || AuthorizationRuleMode.OR}`
        );

        // If no authorization rules are specified, throw a helpful error
        if (!provider.authorizationRules || provider.authorizationRules.length === 0) {
            throw new UnauthorizedException(
                `Login failed: The ${provider.name} provider has no authorization rules configured. ` +
                    `Please configure authorization rules.`
            );
        }

        this.logger.debug('Authorization rules to evaluate: %o', provider.authorizationRules);

        // Evaluate the rules
        const ruleMode = provider.authorizationRuleMode || AuthorizationRuleMode.OR;
        const isAuthorized = this.evaluateAuthorizationRules(
            provider.authorizationRules,
            claims,
            ruleMode
        );

        this.logger.debug(`Authorization result: ${isAuthorized}`);

        if (!isAuthorized) {
            // Log authorization failure with safe claim representation (no PII)
            const availableClaimKeys = Object.keys(claims).join(', ');
            this.logger.warn(
                `Authorization failed for provider ${provider.name}, user ${claims.sub}, available claim keys: [${availableClaimKeys}]`
            );
            throw new UnauthorizedException(
                `Access denied: Your account does not meet the authorization requirements for ${provider.name}.`
            );
        }

        this.logger.debug(`Authorization successful for user ${claims.sub}`);
    }

    private evaluateAuthorizationRules(
        rules: OidcAuthorizationRule[],
        claims: JwtClaims,
        mode: AuthorizationRuleMode = AuthorizationRuleMode.OR
    ): boolean {
        // No rules means no authorization
        if (rules.length === 0) {
            return false;
        }

        if (mode === AuthorizationRuleMode.AND) {
            // All rules must pass (AND logic)
            return rules.every((rule) => this.evaluateRule(rule, claims));
        } else {
            // Any rule can pass (OR logic) - default behavior
            // Multiple rules act as alternative authorization paths
            return rules.some((rule) => this.evaluateRule(rule, claims));
        }
    }

    private evaluateRule(rule: OidcAuthorizationRule, claims: JwtClaims): boolean {
        const claimValue = claims[rule.claim];

        this.logger.verbose(
            `Evaluating rule for claim ${rule.claim}: { claimType: ${typeof claimValue}, isArray: ${Array.isArray(claimValue)}, ruleOperator: ${rule.operator}, ruleValuesCount: ${rule.value.length} }`
        );

        if (claimValue === undefined || claimValue === null) {
            this.logger.verbose(`Claim ${rule.claim} not found in token`);
            return false;
        }

        // Handle non-array, non-string objects
        if (typeof claimValue === 'object' && claimValue !== null && !Array.isArray(claimValue)) {
            this.logger.warn(
                `unexpected JWT claim value encountered - claim ${rule.claim} has unsupported object type (keys: [${Object.keys(claimValue as Record<string, unknown>).join(', ')}])`
            );
            return false;
        }

        // Handle array claims - evaluate rule against each array element
        if (Array.isArray(claimValue)) {
            this.logger.verbose(
                `Processing array claim ${rule.claim} with ${claimValue.length} elements`
            );

            // For array claims, check if ANY element in the array matches the rule
            const arrayResult = claimValue.some((element) => {
                // Skip non-string elements
                if (
                    typeof element !== 'string' &&
                    typeof element !== 'number' &&
                    typeof element !== 'boolean'
                ) {
                    this.logger.verbose(`Skipping non-primitive element in array: ${typeof element}`);
                    return false;
                }

                const elementValue = String(element);
                return this.evaluateSingleValue(elementValue, rule);
            });

            this.logger.verbose(`Array evaluation result for claim ${rule.claim}: ${arrayResult}`);
            return arrayResult;
        }

        // Handle single value claims (string, number, boolean)
        const value = String(claimValue);
        this.logger.verbose(`Processing single value claim ${rule.claim}`);

        return this.evaluateSingleValue(value, rule);
    }

    private evaluateSingleValue(value: string, rule: OidcAuthorizationRule): boolean {
        let result: boolean;
        switch (rule.operator) {
            case AuthorizationOperator.EQUALS:
                result = rule.value.some((v) => value === v);
                this.logger.verbose(`EQUALS check: evaluated for claim ${rule.claim}: ${result}`);
                return result;

            case AuthorizationOperator.CONTAINS:
                result = rule.value.some((v) => value.includes(v));
                this.logger.verbose(`CONTAINS check: evaluated for claim ${rule.claim}: ${result}`);
                return result;

            case AuthorizationOperator.STARTS_WITH:
                result = rule.value.some((v) => value.startsWith(v));
                this.logger.verbose(`STARTS_WITH check: evaluated for claim ${rule.claim}: ${result}`);
                return result;

            case AuthorizationOperator.ENDS_WITH:
                result = rule.value.some((v) => value.endsWith(v));
                this.logger.verbose(`ENDS_WITH check: evaluated for claim ${rule.claim}: ${result}`);
                return result;

            default:
                this.logger.error(`Unknown authorization operator: ${rule.operator}`);
                return false;
        }
    }
}
@@ -0,0 +1,218 @@
import { UnauthorizedException } from '@nestjs/common';
import { Test, TestingModule } from '@nestjs/testing';

import { decodeJwt } from 'jose';
import { beforeEach, describe, expect, it, vi } from 'vitest';

import {
    JwtClaims,
    OidcClaimsService,
} from '@app/unraid-api/graph/resolvers/sso/auth/oidc-claims.service.js';

// Mock jose
vi.mock('jose', () => ({
    decodeJwt: vi.fn(),
}));

describe('OidcClaimsService', () => {
    let service: OidcClaimsService;

    beforeEach(async () => {
        vi.clearAllMocks();

        const module: TestingModule = await Test.createTestingModule({
            providers: [OidcClaimsService],
        }).compile();

        service = module.get<OidcClaimsService>(OidcClaimsService);
    });

    describe('parseIdToken', () => {
        it('should parse valid ID token', () => {
            const mockClaims: JwtClaims = {
                sub: 'user123',
                email: 'user@example.com',
                name: 'Test User',
                iat: 1234567890,
                exp: 1234567890,
            };

            (decodeJwt as any).mockReturnValue(mockClaims);

            const result = service.parseIdToken('valid.jwt.token');

            expect(result).toEqual(mockClaims);
            expect(decodeJwt).toHaveBeenCalledWith('valid.jwt.token');
        });

        it('should return null when no token provided', () => {
            const result = service.parseIdToken(undefined);
            expect(result).toBeNull();
        });

        it('should return null when token parsing fails', () => {
            (decodeJwt as any).mockImplementation(() => {
                throw new Error('Invalid token');
            });

            const result = service.parseIdToken('invalid.token');
            expect(result).toBeNull();
        });

        it('should handle claims with array values', () => {
            const mockClaims: JwtClaims = {
                sub: 'user123',
                groups: ['admin', 'user'],
                roles: ['role1', 'role2', 'role3'],
            };

            (decodeJwt as any).mockReturnValue(mockClaims);

            const result = service.parseIdToken('token.with.arrays');

            expect(result).toEqual(mockClaims);
        });

        it('should log warning for complex object claims', () => {
            const loggerSpy = vi.spyOn(service['logger'], 'warn');

            const mockClaims: JwtClaims = {
                sub: 'user123',
                complexClaim: {
                    nested: 'value',
                    another: 'field',
                },
            };

            (decodeJwt as any).mockReturnValue(mockClaims);

            service.parseIdToken('token.with.complex');

            expect(loggerSpy).toHaveBeenCalledWith(expect.stringContaining('complex object structure'));
        });

        it('should handle Google-specific claims', () => {
            const mockClaims: JwtClaims = {
                sub: 'google-user-id',
                email: 'user@company.com',
                name: 'Google User',
                hd: 'company.com', // Google hosted domain
            };

            (decodeJwt as any).mockReturnValue(mockClaims);

            const result = service.parseIdToken('google.jwt.token');

            expect(result).toEqual(mockClaims);
            expect(result?.hd).toBe('company.com');
        });
    });

    describe('validateClaims', () => {
        it('should return user sub when claims are valid', () => {
            const claims: JwtClaims = {
                sub: 'user123',
                email: 'user@example.com',
            };

            const result = service.validateClaims(claims);
            expect(result).toBe('user123');
        });

        it('should throw UnauthorizedException when claims are null', () => {
            expect(() => service.validateClaims(null)).toThrow(UnauthorizedException);
        });

        it('should throw UnauthorizedException when sub is missing', () => {
            const claims: JwtClaims = {
                email: 'user@example.com',
                name: 'User',
            };

            expect(() => service.validateClaims(claims)).toThrow(UnauthorizedException);
        });

        it('should throw UnauthorizedException when sub is empty', () => {
            const claims: JwtClaims = {
                sub: '',
                email: 'user@example.com',
            };

            expect(() => service.validateClaims(claims)).toThrow(UnauthorizedException);
        });
    });

    describe('extractUserInfo', () => {
        it('should extract basic user information', () => {
            const claims: JwtClaims = {
                sub: 'user123',
                email: 'user@example.com',
                name: 'Test User',
            };

            const result = service.extractUserInfo(claims);

            expect(result).toEqual({
                sub: 'user123',
                email: 'user@example.com',
                name: 'Test User',
                domain: undefined,
            });
        });

        it('should extract Google hosted domain', () => {
            const claims: JwtClaims = {
                sub: 'google-user',
                email: 'user@company.com',
                name: 'Google User',
                hd: 'company.com',
            };

            const result = service.extractUserInfo(claims);

            expect(result).toEqual({
                sub: 'google-user',
                email: 'user@company.com',
                name: 'Google User',
                domain: 'company.com',
            });
        });

        it('should handle missing optional fields', () => {
            const claims: JwtClaims = {
                sub: 'user123',
            };

            const result = service.extractUserInfo(claims);

            expect(result).toEqual({
                sub: 'user123',
                email: undefined,
                name: undefined,
                domain: undefined,
            });
        });

        it('should ignore extra claims', () => {
            const claims: JwtClaims = {
                sub: 'user123',
                email: 'user@example.com',
                name: 'Test User',
                extra: 'claim',
                another: 'field',
                groups: ['admin'],
            };

            const result = service.extractUserInfo(claims);

            expect(result).toEqual({
                sub: 'user123',
                email: 'user@example.com',
                name: 'Test User',
                domain: undefined,
            });
            expect(result).not.toHaveProperty('extra');
            expect(result).not.toHaveProperty('groups');
        });
    });
});
@@ -0,0 +1,80 @@
import { Injectable, Logger, UnauthorizedException } from '@nestjs/common';

import { decodeJwt } from 'jose';

export interface JwtClaims {
    sub?: string;
    email?: string;
    name?: string;
    hd?: string; // Google hosted domain
    [claim: string]: unknown;
}

@Injectable()
export class OidcClaimsService {
    private readonly logger = new Logger(OidcClaimsService.name);

    parseIdToken(idToken: string | undefined): JwtClaims | null {
        if (!idToken) {
            this.logger.error('No ID token received from provider');
            return null;
        }

        try {
            // Use jose to properly decode the JWT
            const claims = decodeJwt(idToken) as JwtClaims;

            // Log claims safely without PII - only structure, not values
            if (claims) {
                const claimKeys = Object.keys(claims).join(', ');
                this.logger.debug(`ID token decoded successfully. Available claims: [${claimKeys}]`);

                // Log claim types without exposing sensitive values
                for (const [key, value] of Object.entries(claims)) {
                    const valueType = Array.isArray(value) ? `array[${value.length}]` : typeof value;

                    // Only log structure, not actual values (avoid PII)
                    this.logger.debug(`Claim '${key}': type=${valueType}`);

                    // Check for unexpected claim types
                    if (valueType === 'object' && value !== null && !Array.isArray(value)) {
                        this.logger.warn(`Claim '${key}' contains complex object structure`);
                    }
                }
            }

            return claims;
        } catch (e) {
            this.logger.warn(`Failed to parse ID token: ${e}`);
            return null;
        }
    }

    validateClaims(claims: JwtClaims | null): string {
        if (!claims?.sub) {
            this.logger.error(
                'No subject in token - claims available: ' +
                    (claims ? Object.keys(claims).join(', ') : 'none')
            );
            throw new UnauthorizedException('No subject in token');
        }

        const userSub = claims.sub;
        this.logger.debug(`Processing authentication for user: ${userSub}`);
        return userSub;
    }

    extractUserInfo(claims: JwtClaims): {
        sub: string;
        email?: string;
        name?: string;
        domain?: string;
    } {
        return {
            sub: claims.sub!,
            email: claims.email,
            name: claims.name,
            domain: claims.hd,
        };
    }
}
@@ -0,0 +1,224 @@
import { Logger } from '@nestjs/common';

import * as client from 'openid-client';
import { beforeEach, describe, expect, it, vi } from 'vitest';

import { OidcTokenExchangeService } from '@app/unraid-api/graph/resolvers/sso/auth/oidc-token-exchange.service.js';
import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';

vi.mock('openid-client', () => ({
    authorizationCodeGrant: vi.fn(),
    allowInsecureRequests: vi.fn(),
}));

describe('OidcTokenExchangeService', () => {
    let service: OidcTokenExchangeService;
    let mockConfig: client.Configuration;
    let mockProvider: OidcProvider;

    beforeEach(() => {
        service = new OidcTokenExchangeService();

        mockConfig = {
            serverMetadata: vi.fn().mockReturnValue({
                issuer: 'https://example.com',
                token_endpoint: 'https://example.com/token',
                response_types_supported: ['code'],
                grant_types_supported: ['authorization_code'],
                token_endpoint_auth_methods_supported: ['client_secret_post'],
            }),
        } as unknown as client.Configuration;

        mockProvider = {
            id: 'test-provider',
            issuer: 'https://example.com',
            clientId: 'test-client-id',
            clientSecret: 'test-client-secret',
        } as OidcProvider;

        vi.clearAllMocks();
    });

    describe('exchangeCodeForTokens', () => {
        it('should handle malformed fullCallbackUrl gracefully', async () => {
            const code = 'test-code';
            const state = 'test-state';
            const redirectUri = 'https://example.com/callback';
            const malformedUrl = 'not://a valid url';

            const mockTokens = {
                access_token: 'test-access-token',
                id_token: 'test-id-token',
            };

            vi.mocked(client.authorizationCodeGrant).mockResolvedValue(mockTokens as any);

            const loggerWarnSpy = vi.spyOn(Logger.prototype, 'warn').mockImplementation(() => {});
            const loggerDebugSpy = vi.spyOn(Logger.prototype, 'debug').mockImplementation(() => {});

            const result = await service.exchangeCodeForTokens(
                mockConfig,
                mockProvider,
                code,
                state,
                redirectUri,
                malformedUrl
            );

            expect(result).toEqual(mockTokens);
            expect(loggerWarnSpy).toHaveBeenCalledWith(
                expect.stringContaining('Failed to parse fullCallbackUrl'),
                expect.any(Error)
            );
            expect(client.authorizationCodeGrant).toHaveBeenCalled();
        });

        it('should handle empty fullCallbackUrl without throwing', async () => {
            const code = 'test-code';
            const state = 'test-state';
            const redirectUri = 'https://example.com/callback';

            const mockTokens = {
                access_token: 'test-access-token',
                id_token: 'test-id-token',
            };

            vi.mocked(client.authorizationCodeGrant).mockResolvedValue(mockTokens as any);

            const loggerWarnSpy = vi.spyOn(Logger.prototype, 'warn').mockImplementation(() => {});

            const result = await service.exchangeCodeForTokens(
                mockConfig,
                mockProvider,
                code,
                state,
                redirectUri,
                ''
            );

            expect(result).toEqual(mockTokens);
            expect(loggerWarnSpy).not.toHaveBeenCalled();
            expect(client.authorizationCodeGrant).toHaveBeenCalled();
        });

        it('should handle whitespace-only fullCallbackUrl without throwing', async () => {
            const code = 'test-code';
            const state = 'test-state';
            const redirectUri = 'https://example.com/callback';

            const mockTokens = {
                access_token: 'test-access-token',
                id_token: 'test-id-token',
            };

            vi.mocked(client.authorizationCodeGrant).mockResolvedValue(mockTokens as any);

            const loggerWarnSpy = vi.spyOn(Logger.prototype, 'warn').mockImplementation(() => {});

            const result = await service.exchangeCodeForTokens(
                mockConfig,
                mockProvider,
                code,
                state,
                redirectUri,
                '   '
            );

            expect(result).toEqual(mockTokens);
            expect(loggerWarnSpy).not.toHaveBeenCalled();
            expect(client.authorizationCodeGrant).toHaveBeenCalled();
        });

        it('should copy parameters from valid fullCallbackUrl', async () => {
            const code = 'test-code';
            const state = 'test-state';
            const redirectUri = 'https://example.com/callback';
            const fullCallbackUrl =
                'https://example.com/callback?code=test-code&state=test-state&scope=openid&authuser=0';

            const mockTokens = {
                access_token: 'test-access-token',
                id_token: 'test-id-token',
            };

            vi.mocked(client.authorizationCodeGrant).mockResolvedValue(mockTokens as any);

            const loggerWarnSpy = vi.spyOn(Logger.prototype, 'warn').mockImplementation(() => {});
            const loggerDebugSpy = vi.spyOn(Logger.prototype, 'debug').mockImplementation(() => {});

            const result = await service.exchangeCodeForTokens(
                mockConfig,
                mockProvider,
                code,
                state,
                redirectUri,
                fullCallbackUrl
            );

            expect(result).toEqual(mockTokens);
            expect(loggerWarnSpy).not.toHaveBeenCalled();

            const authCodeGrantCall = vi.mocked(client.authorizationCodeGrant).mock.calls[0];
            const cleanUrl = authCodeGrantCall[1] as URL;

            expect(cleanUrl.searchParams.get('scope')).toBe('openid');
            expect(cleanUrl.searchParams.get('authuser')).toBe('0');
        });

        it('should handle undefined fullCallbackUrl', async () => {
            const code = 'test-code';
            const state = 'test-state';
            const redirectUri = 'https://example.com/callback';

            const mockTokens = {
                access_token: 'test-access-token',
                id_token: 'test-id-token',
            };

            vi.mocked(client.authorizationCodeGrant).mockResolvedValue(mockTokens as any);

            const loggerWarnSpy = vi.spyOn(Logger.prototype, 'warn').mockImplementation(() => {});

            const result = await service.exchangeCodeForTokens(
                mockConfig,
                mockProvider,
                code,
                state,
                redirectUri,
                undefined
            );

            expect(result).toEqual(mockTokens);
            expect(loggerWarnSpy).not.toHaveBeenCalled();
            expect(client.authorizationCodeGrant).toHaveBeenCalled();
        });

        it('should handle non-string fullCallbackUrl types gracefully', async () => {
            const code = 'test-code';
            const state = 'test-state';
            const redirectUri = 'https://example.com/callback';

            const mockTokens = {
                access_token: 'test-access-token',
                id_token: 'test-id-token',
            };

            vi.mocked(client.authorizationCodeGrant).mockResolvedValue(mockTokens as any);

            const loggerWarnSpy = vi.spyOn(Logger.prototype, 'warn').mockImplementation(() => {});

            const result = await service.exchangeCodeForTokens(
                mockConfig,
                mockProvider,
                code,
                state,
                redirectUri,
                123 as any
            );

            expect(result).toEqual(mockTokens);
            expect(loggerWarnSpy).not.toHaveBeenCalled();
            expect(client.authorizationCodeGrant).toHaveBeenCalled();
        });
    });
});
@@ -0,0 +1,174 @@
import { Injectable, Logger } from '@nestjs/common';

import * as client from 'openid-client';

import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';
import { ErrorExtractor } from '@app/unraid-api/utils/error-extractor.util.js';

// Extended type for our internal use - openid-client v6 doesn't directly expose
// skip options for aud/iss checks, so we'll handle validation errors differently
type ExtendedGrantChecks = client.AuthorizationCodeGrantChecks;

@Injectable()
export class OidcTokenExchangeService {
    private readonly logger = new Logger(OidcTokenExchangeService.name);

    async exchangeCodeForTokens(
        config: client.Configuration,
        provider: OidcProvider,
        code: string,
        state: string,
        redirectUri: string,
        fullCallbackUrl?: string
    ): Promise<client.TokenEndpointResponse> {
        this.logger.debug(`Provider ${provider.id} config loaded`);
        this.logger.debug(`Redirect URI: ${redirectUri}`);

        // Build current URL for token exchange
        // CRITICAL: The URL used here MUST match the redirect_uri that was sent to the authorization endpoint
        // Google expects the exact same redirect_uri during token exchange
        const currentUrl = new URL(redirectUri);
        currentUrl.searchParams.set('code', code);
        currentUrl.searchParams.set('state', state);

        // Copy additional parameters from the actual callback if provided
        if (fullCallbackUrl && typeof fullCallbackUrl === 'string' && fullCallbackUrl.trim()) {
            try {
                const actualUrl = new URL(fullCallbackUrl);
                // Copy over additional params that Google might have added (scope, authuser, prompt, etc)
                // but DO NOT change the base URL or path
                ['scope', 'authuser', 'prompt', 'hd', 'session_state', 'iss'].forEach((param) => {
                    const value = actualUrl.searchParams.get(param);
                    if (value && !currentUrl.searchParams.has(param)) {
                        currentUrl.searchParams.set(param, value);
                    }
                });
            } catch (urlError) {
                this.logger.warn(`Failed to parse fullCallbackUrl: ${fullCallbackUrl}`, urlError);
                // Continue with the existing currentUrl flow without additional params
            }
        }

        // Google returns iss in the response, openid-client v6 expects it
        // If not present, add it based on the provider's issuer
        if (!currentUrl.searchParams.has('iss') && provider.issuer) {
            currentUrl.searchParams.set('iss', provider.issuer);
        }

        this.logger.debug(`Token exchange URL (matches redirect_uri): ${currentUrl.href}`);

        // For openid-client v6, we need to prepare the authorization response
        const authorizationResponse = new URLSearchParams(currentUrl.search);

        // Set the original client state for openid-client
        authorizationResponse.set('state', state);

        // Create a new URL with the cleaned parameters
        const cleanUrl = new URL(redirectUri);
        cleanUrl.search = authorizationResponse.toString();

        this.logger.debug(`Clean URL for token exchange: ${cleanUrl.href}`);

        try {
            this.logger.debug(`Starting token exchange with openid-client`);
            this.logger.debug(`Config issuer: ${config.serverMetadata().issuer}`);
            this.logger.debug(`Config token endpoint: ${config.serverMetadata().token_endpoint}`);

            // Log the complete token exchange request details
            const tokenEndpoint = config.serverMetadata().token_endpoint;
            this.logger.debug(`Full token endpoint URL: ${tokenEndpoint}`);
            this.logger.debug(`Authorization code: ${code.substring(0, 10)}...`);
            this.logger.debug(`Redirect URI in token request: ${redirectUri}`);
            this.logger.debug(`Client ID: ${provider.clientId}`);
            this.logger.debug(`Client secret configured: ${provider.clientSecret ? 'Yes' : 'No'}`);
            this.logger.debug(`Expected state value: ${state}`);

            // Log the server metadata to check for any configuration issues
            const metadata = config.serverMetadata();
            this.logger.debug(
                `Server supports response types: ${metadata.response_types_supported?.join(', ') || 'not specified'}`
            );
            this.logger.debug(
                `Server grant types: ${metadata.grant_types_supported?.join(', ') || 'not specified'}`
            );
            this.logger.debug(
                `Token endpoint auth methods: ${metadata.token_endpoint_auth_methods_supported?.join(', ') || 'not specified'}`
            );

            // For HTTP endpoints, we need to call allowInsecureRequests on the config
            if (provider.issuer) {
                try {
                    const serverUrl = new URL(provider.issuer);
                    if (serverUrl.protocol === 'http:') {
                        this.logger.debug(
                            `Allowing insecure requests for HTTP endpoint: ${provider.id}`
                        );
                        // allowInsecureRequests is deprecated but still needed for HTTP endpoints
                        client.allowInsecureRequests(config);
                    }
                } catch (error) {
                    this.logger.warn(
                        `Invalid issuer URL for provider ${provider.id}: ${provider.issuer}`
                    );
                    // Continue without special HTTP options
                }
            }

            const requestChecks: ExtendedGrantChecks = {
                expectedState: state,
            };

            // Log what we're about to send
            this.logger.debug(`Executing authorizationCodeGrant with:`);
            this.logger.debug(`- Clean URL: ${cleanUrl.href}`);
            this.logger.debug(`- Expected state: ${state}`);
            this.logger.debug(`- Grant type: authorization_code`);

            const tokens = await client.authorizationCodeGrant(config, cleanUrl, requestChecks);

            this.logger.debug(
                `Token exchange successful, received tokens: ${Object.keys(tokens).join(', ')}`
            );

            return tokens;
        } catch (tokenError) {
            // Extract and log error details using the utility
            const extracted = ErrorExtractor.extract(tokenError);
            this.logger.error('Token exchange failed');
            ErrorExtractor.formatForLogging(extracted, this.logger);

            // Special handling for content-type and parsing errors
            if (ErrorExtractor.isOAuthResponseError(extracted)) {
                this.logger.error('Token endpoint returned invalid or non-JSON response.');
                this.logger.error('This typically means:');
                this.logger.error(
                    '1. The token endpoint URL is incorrect (check for typos or wrong paths)'
                );
                this.logger.error('2. The server returned an HTML error page instead of JSON');
                this.logger.error('3. Authentication failed (invalid client_id or client_secret)');
                this.logger.error('4. A proxy/firewall is intercepting the request');
                this.logger.error('5. The OAuth server returned malformed JSON');
                this.logger.error(
                    `Configured token endpoint: ${config.serverMetadata().token_endpoint}`
                );
                this.logger.error('Please verify your OIDC provider configuration.');
            }

            // Check if error message contains the "unexpected JWT claim" text
            if (ErrorExtractor.isJwtClaimError(extracted)) {
                this.logger.error(
                    `unexpected JWT claim value encountered during token validation by openid-client`
                );
                this.logger.error(
                    `This error typically means the 'iss' claim in the JWT doesn't match the expected issuer`
                );
                this.logger.error(`Check that your provider's issuer URL is configured correctly`);
                this.logger.error(`Expected issuer: ${config.serverMetadata().issuer}`);
                this.logger.error(`Provider configured issuer: ${provider.issuer}`);
            }

            // Re-throw the original error with all its properties intact
            throw tokenError;
        }
    }
}
@@ -0,0 +1,216 @@
import { ConfigService } from '@nestjs/config';
import { Test } from '@nestjs/testing';

import * as client from 'openid-client';
import { beforeEach, describe, expect, it, vi } from 'vitest';

import { OidcClientConfigService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client-config.service.js';
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';

vi.mock('openid-client');

describe('OidcClientConfigService - Cache Behavior', () => {
    let service: OidcClientConfigService;
    let validationService: OidcValidationService;

    const createMockProvider = (port: number): OidcProvider => ({
        id: 'test-provider',
        name: 'Test Provider',
        clientId: 'test-client-id',
        clientSecret: 'test-secret',
        issuer: `http://localhost:${port}`,
        scopes: ['openid', 'profile', 'email'],
        authorizationRules: [],
    });

    const createMockConfiguration = (port: number) => {
        const mockConfig = {
            serverMetadata: vi.fn(() => ({
                issuer: `http://localhost:${port}`,
                authorization_endpoint: `http://localhost:${port}/auth`,
                token_endpoint: `http://localhost:${port}/token`,
                jwks_uri: `http://localhost:${port}/jwks`,
                userinfo_endpoint: `http://localhost:${port}/userinfo`,
            })),
        };
        return mockConfig as unknown as client.Configuration;
    };

    beforeEach(async () => {
        vi.clearAllMocks();

        const mockConfigService = {
            get: vi.fn(),
            set: vi.fn(),
        };

        const module = await Test.createTestingModule({
            providers: [
                OidcClientConfigService,
                OidcValidationService,
                {
                    provide: ConfigService,
                    useValue: mockConfigService,
                },
            ],
        }).compile();

        service = module.get<OidcClientConfigService>(OidcClientConfigService);
        validationService = module.get<OidcValidationService>(OidcValidationService);
    });

    describe('Configuration Caching', () => {
        it('should cache configuration on first call', async () => {
            const provider = createMockProvider(1029);
            const mockConfig = createMockConfiguration(1029);

            vi.spyOn(validationService, 'performDiscovery').mockResolvedValueOnce(mockConfig);

            // First call
            const config1 = await service.getOrCreateConfig(provider);
            expect(validationService.performDiscovery).toHaveBeenCalledTimes(1);
            expect(config1.serverMetadata().issuer).toBe('http://localhost:1029');

            // Second call with same provider ID should use cache
            const config2 = await service.getOrCreateConfig(provider);
            expect(validationService.performDiscovery).toHaveBeenCalledTimes(1);
            expect(config2).toBe(config1);
        });

        it('should return stale cached configuration when issuer changes without cache clear', async () => {
            const provider1029 = createMockProvider(1029);
            const provider1030 = createMockProvider(1030);
            const mockConfig1029 = createMockConfiguration(1029);
            const mockConfig1030 = createMockConfiguration(1030);

            vi.spyOn(validationService, 'performDiscovery')
                .mockResolvedValueOnce(mockConfig1029)
                .mockResolvedValueOnce(mockConfig1030);

            // Initial configuration on port 1029
            const config1 = await service.getOrCreateConfig(provider1029);
            expect(config1.serverMetadata().issuer).toBe('http://localhost:1029');
            expect(config1.serverMetadata().authorization_endpoint).toBe('http://localhost:1029/auth');

            // Update provider to port 1030 (simulating UI change)
            // Without clearing cache, it should still return the old cached config
            const config2 = await service.getOrCreateConfig(provider1030);

            // THIS IS THE BUG: The service returns cached config for port 1029
            // even though the provider now has issuer on port 1030
            expect(config2.serverMetadata().issuer).toBe('http://localhost:1029');
            expect(config2.serverMetadata().authorization_endpoint).toBe('http://localhost:1029/auth');

            // performDiscovery should only be called once because cache is used
            expect(validationService.performDiscovery).toHaveBeenCalledTimes(1);
        });

        it('should return fresh configuration after cache is cleared', async () => {
            const provider1029 = createMockProvider(1029);
            const provider1030 = createMockProvider(1030);
            const mockConfig1029 = createMockConfiguration(1029);
            const mockConfig1030 = createMockConfiguration(1030);

            vi.spyOn(validationService, 'performDiscovery')
                .mockResolvedValueOnce(mockConfig1029)
                .mockResolvedValueOnce(mockConfig1030);

            // Initial configuration on port 1029
            const config1 = await service.getOrCreateConfig(provider1029);
            expect(config1.serverMetadata().issuer).toBe('http://localhost:1029');

            // Clear cache for the provider
            service.clearCache(provider1030.id);

            // Now it should fetch fresh config for port 1030
            const config2 = await service.getOrCreateConfig(provider1030);
            expect(config2.serverMetadata().issuer).toBe('http://localhost:1030');
            expect(config2.serverMetadata().authorization_endpoint).toBe('http://localhost:1030/auth');

            // performDiscovery should be called twice (once for each port)
            expect(validationService.performDiscovery).toHaveBeenCalledTimes(2);
        });

        it('should clear all provider caches when clearCache is called without providerId', async () => {
            const provider1 = { ...createMockProvider(1029), id: 'provider1' };
            const provider2 = { ...createMockProvider(1030), id: 'provider2' };
            const mockConfig1 = createMockConfiguration(1029);
            const mockConfig2 = createMockConfiguration(1030);

            vi.spyOn(validationService, 'performDiscovery')
                .mockResolvedValueOnce(mockConfig1)
                .mockResolvedValueOnce(mockConfig2)
                .mockResolvedValueOnce(mockConfig1)
                .mockResolvedValueOnce(mockConfig2);

            // Cache both providers
            await service.getOrCreateConfig(provider1);
            await service.getOrCreateConfig(provider2);
            expect(service.getCacheSize()).toBe(2);

            // Clear all caches
            service.clearCache();
            expect(service.getCacheSize()).toBe(0);

            // Both should fetch fresh configs
            await service.getOrCreateConfig(provider1);
            await service.getOrCreateConfig(provider2);

            // performDiscovery should be called 4 times total
            expect(validationService.performDiscovery).toHaveBeenCalledTimes(4);
        });
    });

    describe('Manual Configuration Caching', () => {
        it('should cache manual configuration and exhibit same stale cache issue', async () => {
            const provider1029: OidcProvider = {
                id: 'manual-provider',
                name: 'Manual Provider',
                clientId: 'client-id',
                clientSecret: 'secret',
                issuer: '',
                authorizationEndpoint: 'http://localhost:1029/auth',
                tokenEndpoint: 'http://localhost:1029/token',
                scopes: ['openid'],
                authorizationRules: [],
            };

            const provider1030: OidcProvider = {
                ...provider1029,
                authorizationEndpoint: 'http://localhost:1030/auth',
                tokenEndpoint: 'http://localhost:1030/token',
            };

            // Mock the client.Configuration constructor for manual configs
            const mockManualConfig1029 = createMockConfiguration(1029);
            const mockManualConfig1030 = createMockConfiguration(1030);

            let configCallCount = 0;
            vi.mocked(client.Configuration).mockImplementation(() => {
                configCallCount++;
                return configCallCount === 1 ? mockManualConfig1029 : mockManualConfig1030;
            });

            vi.mocked(client.ClientSecretPost).mockReturnValue({} as any);
            vi.mocked(client.allowInsecureRequests).mockImplementation(() => {});

            // First call with port 1029
            const config1 = await service.getOrCreateConfig(provider1029);
            expect(config1.serverMetadata().authorization_endpoint).toBe('http://localhost:1029/auth');

            // Update to port 1030 without clearing cache
            const config2 = await service.getOrCreateConfig(provider1030);

            // BUG: Still returns cached config with port 1029
            expect(config2.serverMetadata().authorization_endpoint).toBe('http://localhost:1029/auth');

            // Clear cache and try again
            service.clearCache(provider1030.id);
            const config3 = await service.getOrCreateConfig(provider1030);

            // Now it should return the updated config
            expect(config3.serverMetadata().authorization_endpoint).toBe('http://localhost:1030/auth');
        });
    });
});
@@ -0,0 +1,267 @@
import { Test, TestingModule } from '@nestjs/testing';

import { beforeEach, describe, expect, it, vi } from 'vitest';

import { OidcClientConfigService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client-config.service.js';
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';

describe('OidcClientConfigService', () => {
    let service: OidcClientConfigService;
    let validationService: any;

    beforeEach(async () => {
        const module: TestingModule = await Test.createTestingModule({
            providers: [
                OidcClientConfigService,
                {
                    provide: OidcValidationService,
                    useValue: {
                        performDiscovery: vi.fn(),
                    },
                },
            ],
        }).compile();

        service = module.get<OidcClientConfigService>(OidcClientConfigService);
        validationService = module.get(OidcValidationService);
    });

    describe('Manual Configuration', () => {
        it('should create manual configuration when discovery fails but manual endpoints are provided', async () => {
            const provider: OidcProvider = {
                id: 'manual-provider',
                name: 'Manual Provider',
                clientId: 'test-client-id',
                clientSecret: 'test-client-secret',
                issuer: 'https://manual.example.com',
                authorizationEndpoint: 'https://manual.example.com/auth',
                tokenEndpoint: 'https://manual.example.com/token',
                jwksUri: 'https://manual.example.com/jwks',
                scopes: ['openid', 'profile'],
                authorizationRules: [],
            };

            // Mock discovery to fail
            validationService.performDiscovery.mockRejectedValue(new Error('Discovery failed'));

            const config = await service.getOrCreateConfig(provider);

            // Verify the configuration was created with the correct endpoints
            expect(config).toBeDefined();
            expect(config.serverMetadata().authorization_endpoint).toBe(
                'https://manual.example.com/auth'
            );
            expect(config.serverMetadata().token_endpoint).toBe('https://manual.example.com/token');
            expect(config.serverMetadata().jwks_uri).toBe('https://manual.example.com/jwks');
            expect(config.serverMetadata().issuer).toBe('https://manual.example.com');
        });

        it('should create manual configuration with fallback issuer when not provided', async () => {
            const provider: OidcProvider = {
                id: 'manual-provider-no-issuer',
                name: 'Manual Provider No Issuer',
                clientId: 'test-client-id',
                clientSecret: 'test-client-secret',
                issuer: '', // Empty issuer should skip discovery and use manual endpoints
                authorizationEndpoint: 'https://manual.example.com/auth',
                tokenEndpoint: 'https://manual.example.com/token',
                scopes: ['openid', 'profile'],
                authorizationRules: [],
            };

            const config = await service.getOrCreateConfig(provider);

            // Verify the configuration was created with inferred issuer from endpoints
            expect(config).toBeDefined();
            expect(config.serverMetadata().issuer).toBe('https://manual.example.com');
            expect(config.serverMetadata().authorization_endpoint).toBe(
                'https://manual.example.com/auth'
            );
            expect(config.serverMetadata().token_endpoint).toBe('https://manual.example.com/token');
        });

        it('should handle manual configuration with client secret properly', async () => {
            const provider: OidcProvider = {
                id: 'manual-with-secret',
                name: 'Manual With Secret',
                clientId: 'test-client-id',
                clientSecret: 'secret-123',
                issuer: 'https://manual.example.com',
                authorizationEndpoint: 'https://manual.example.com/auth',
                tokenEndpoint: 'https://manual.example.com/token',
                scopes: ['openid', 'profile'],
                authorizationRules: [],
            };

            // Mock discovery to fail
            validationService.performDiscovery.mockRejectedValue(new Error('Discovery failed'));

            const config = await service.getOrCreateConfig(provider);

            // Verify configuration was created successfully
            expect(config).toBeDefined();
            expect(config.clientMetadata().client_secret).toBe('secret-123');
        });

        it('should handle manual configuration without client secret (public client)', async () => {
            const provider: OidcProvider = {
                id: 'manual-public-client',
                name: 'Manual Public Client',
                clientId: 'public-client-id',
                // No client secret
                issuer: 'https://manual.example.com',
                authorizationEndpoint: 'https://manual.example.com/auth',
                tokenEndpoint: 'https://manual.example.com/token',
                scopes: ['openid', 'profile'],
                authorizationRules: [],
            };

            // Mock discovery to fail
            validationService.performDiscovery.mockRejectedValue(new Error('Discovery failed'));

            const config = await service.getOrCreateConfig(provider);

            // Verify configuration was created successfully
            expect(config).toBeDefined();
            expect(config.clientMetadata().client_secret).toBeUndefined();
        });

        it('should cache configurations', async () => {
            const provider: OidcProvider = {
                id: 'cached-provider',
                name: 'Cached Provider',
                clientId: 'test-client-id',
                issuer: '',
                authorizationEndpoint: 'https://cached.example.com/auth',
                tokenEndpoint: 'https://cached.example.com/token',
                scopes: ['openid'],
                authorizationRules: [],
            };

            // First call
            const config1 = await service.getOrCreateConfig(provider);

            // Second call - should return cached value
            const config2 = await service.getOrCreateConfig(provider);

            // Should be the exact same object
            expect(config1).toBe(config2);
            expect(service.getCacheSize()).toBe(1);
        });

        it('should clear cache for specific provider', async () => {
            const provider: OidcProvider = {
                id: 'provider-to-clear',
                name: 'Provider to Clear',
                clientId: 'test-client-id',
                issuer: '',
                authorizationEndpoint: 'https://clear.example.com/auth',
                tokenEndpoint: 'https://clear.example.com/token',
                scopes: ['openid'],
                authorizationRules: [],
            };

            await service.getOrCreateConfig(provider);
            expect(service.getCacheSize()).toBe(1);

            service.clearCache('provider-to-clear');
            expect(service.getCacheSize()).toBe(0);
        });

        it('should clear entire cache', async () => {
            const provider1: OidcProvider = {
                id: 'provider1',
                name: 'Provider 1',
                clientId: 'client1',
                issuer: '',
                authorizationEndpoint: 'https://p1.example.com/auth',
                tokenEndpoint: 'https://p1.example.com/token',
                scopes: ['openid'],
                authorizationRules: [],
            };

            const provider2: OidcProvider = {
                id: 'provider2',
                name: 'Provider 2',
                clientId: 'client2',
                issuer: '',
                authorizationEndpoint: 'https://p2.example.com/auth',
                tokenEndpoint: 'https://p2.example.com/token',
                scopes: ['openid'],
                authorizationRules: [],
            };

            await service.getOrCreateConfig(provider1);
            await service.getOrCreateConfig(provider2);
            expect(service.getCacheSize()).toBe(2);

            service.clearCache();
            expect(service.getCacheSize()).toBe(0);
        });
    });

    describe('Discovery Configuration', () => {
        it('should use discovery when issuer is provided', async () => {
            const provider: OidcProvider = {
                id: 'discovery-provider',
                name: 'Discovery Provider',
                clientId: 'test-client-id',
                clientSecret: 'test-secret',
                issuer: 'https://discovery.example.com',
                scopes: ['openid', 'profile'],
                authorizationRules: [],
            };

            const mockConfig = {
                serverMetadata: vi.fn().mockReturnValue({
                    issuer: 'https://discovery.example.com',
                    authorization_endpoint: 'https://discovery.example.com/authorize',
                    token_endpoint: 'https://discovery.example.com/token',
                    jwks_uri: 'https://discovery.example.com/.well-known/jwks.json',
                    userinfo_endpoint: 'https://discovery.example.com/userinfo',
                }),
                clientMetadata: vi.fn().mockReturnValue({}),
            };

            validationService.performDiscovery.mockResolvedValue(mockConfig);

            const config = await service.getOrCreateConfig(provider);

            expect(validationService.performDiscovery).toHaveBeenCalledWith(provider, undefined);
            expect(config).toBe(mockConfig);
        });

        it('should allow HTTP for discovery when issuer uses HTTP', async () => {
            const provider: OidcProvider = {
                id: 'http-discovery-provider',
                name: 'HTTP Discovery Provider',
                clientId: 'test-client-id',
                issuer: 'http://discovery.example.com',
                scopes: ['openid'],
                authorizationRules: [],
            };

            const mockConfig = {
                serverMetadata: vi.fn().mockReturnValue({
                    issuer: 'http://discovery.example.com',
                    authorization_endpoint: 'http://discovery.example.com/authorize',
                    token_endpoint: 'http://discovery.example.com/token',
                }),
                clientMetadata: vi.fn().mockReturnValue({}),
            };

            validationService.performDiscovery.mockResolvedValue(mockConfig);

            const config = await service.getOrCreateConfig(provider);

            expect(validationService.performDiscovery).toHaveBeenCalledWith(
                provider,
                expect.objectContaining({
                    execute: expect.any(Array),
                })
            );
            expect(config).toBe(mockConfig);
        });
    });
});
@@ -0,0 +1,168 @@
import { Injectable, Logger, UnauthorizedException } from '@nestjs/common';

import * as client from 'openid-client';

import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';
import { ErrorExtractor } from '@app/unraid-api/utils/error-extractor.util.js';

@Injectable()
export class OidcClientConfigService {
    private readonly logger = new Logger(OidcClientConfigService.name);
    private readonly configCache = new Map<string, client.Configuration>();

    constructor(private readonly validationService: OidcValidationService) {}

    async getOrCreateConfig(provider: OidcProvider): Promise<client.Configuration> {
        const cacheKey = provider.id;

        if (this.configCache.has(cacheKey)) {
            return this.configCache.get(cacheKey)!;
        }

        try {
            // Use the validation service to perform discovery with HTTP support
            if (provider.issuer) {
                this.logger.debug(`Attempting discovery for ${provider.id} at ${provider.issuer}`);

                // Create client options with HTTP support if needed
                const serverUrl = new URL(provider.issuer);
                let clientOptions: client.DiscoveryRequestOptions | undefined;
                if (serverUrl.protocol === 'http:') {
                    this.logger.debug(`Allowing HTTP for ${provider.id} as specified by user`);
                    clientOptions = {
                        execute: [client.allowInsecureRequests],
                    };
                }

                try {
                    const config = await this.validationService.performDiscovery(
                        provider,
                        clientOptions
                    );
                    this.logger.debug(`Discovery successful for ${provider.id}`);
                    this.logger.debug(
                        `Authorization endpoint: ${config.serverMetadata().authorization_endpoint}`
                    );
                    this.logger.debug(`Token endpoint: ${config.serverMetadata().token_endpoint}`);
                    this.logger.debug(`JWKS URI: ${config.serverMetadata().jwks_uri || 'Not provided'}`);
                    this.logger.debug(
                        `Userinfo endpoint: ${config.serverMetadata().userinfo_endpoint || 'Not provided'}`
                    );
                    this.configCache.set(cacheKey, config);
                    return config;
                } catch (discoveryError) {
                    const extracted = ErrorExtractor.extract(discoveryError);
                    this.logger.warn(`Discovery failed for ${provider.id}: ${extracted.message}`);

                    // Log more details about the discovery error
                    const discoveryUrl = `${provider.issuer}/.well-known/openid-configuration`;
                    this.logger.debug(`Discovery URL attempted: ${discoveryUrl}`);

                    // Use error extractor for consistent logging
                    ErrorExtractor.formatForLogging(extracted, this.logger);

                    // If discovery fails but we have manual endpoints, use them
                    if (provider.authorizationEndpoint && provider.tokenEndpoint) {
                        this.logger.log(`Using manual endpoints for ${provider.id}`);
                        return this.createManualConfiguration(provider, cacheKey);
                    } else {
                        throw new Error(
                            `OIDC discovery failed and no manual endpoints provided for ${provider.id}`
                        );
                    }
                }
            }

            // Manual configuration when no issuer is provided
            if (provider.authorizationEndpoint && provider.tokenEndpoint) {
                this.logger.log(`Using manual endpoints for ${provider.id} (no issuer provided)`);
                return this.createManualConfiguration(provider, cacheKey);
            }

            // If we reach here, neither discovery nor manual endpoints are available
            throw new Error(
                `No configuration method available for ${provider.id}: requires either valid issuer for discovery or manual endpoints`
            );
        } catch (error) {
            const extracted = ErrorExtractor.extract(error);
            this.logger.error(
                `Failed to create OIDC configuration for ${provider.id}: ${extracted.message}`
            );

            // Log more details in debug mode
            if (extracted.stack) {
                this.logger.debug(`Stack trace: ${extracted.stack}`);
            }

            throw new UnauthorizedException('Provider configuration error');
        }
    }

    private createManualConfiguration(provider: OidcProvider, cacheKey: string): client.Configuration {
        // Create manual configuration with a valid issuer URL
        const inferredIssuer =
            provider.issuer && provider.issuer.trim() !== ''
                ? provider.issuer
                : new URL(provider.authorizationEndpoint ?? provider.tokenEndpoint!).origin;
        const serverMetadata: client.ServerMetadata = {
            issuer: inferredIssuer,
            authorization_endpoint: provider.authorizationEndpoint!,
            token_endpoint: provider.tokenEndpoint!,
            jwks_uri: provider.jwksUri,
        };

        const clientMetadata: Partial<client.ClientMetadata> = {
            client_secret: provider.clientSecret,
        };

        // Configure client auth method
        const clientAuth = provider.clientSecret
            ? client.ClientSecretPost(provider.clientSecret)
            : client.None();

        try {
            const config = new client.Configuration(
                serverMetadata,
                provider.clientId,
                clientMetadata,
                clientAuth
            );

            // Allow HTTP if any configured endpoint uses http
            const endpoints = [
                serverMetadata.authorization_endpoint,
                serverMetadata.token_endpoint,
            ].filter(Boolean) as string[];
            const hasHttp = endpoints.some((e) => new URL(e).protocol === 'http:');
            if (hasHttp) {
                this.logger.debug(`Allowing HTTP for manual endpoints on ${provider.id}`);
                // allowInsecureRequests is deprecated but still needed for HTTP endpoints
                client.allowInsecureRequests(config);
            }

            this.logger.debug(`Manual configuration created for ${provider.id}`);
            this.logger.debug(`Authorization endpoint: ${serverMetadata.authorization_endpoint}`);
            this.logger.debug(`Token endpoint: ${serverMetadata.token_endpoint}`);

            this.configCache.set(cacheKey, config);
            return config;
        } catch (manualConfigError) {
            const extracted = ErrorExtractor.extract(manualConfigError);
            this.logger.error(`Failed to create manual configuration: ${extracted.message}`);
            throw new Error(`Manual configuration failed for ${provider.id}`);
        }
    }

    clearCache(providerId?: string): void {
        if (providerId) {
            this.configCache.delete(providerId);
        } else {
            this.configCache.clear();
        }
    }

    getCacheSize(): number {
        return this.configCache.size;
    }
}
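A minimal usage sketch of the service above, assuming a NestJS injection context; the provider values and the `clientConfigService` variable are illustrative placeholders, not call sites taken from the codebase. It shows the cache-then-discover flow and the explicit cache invalidation:

    // Hypothetical consumer; provider shape matches OidcProvider as used in the specs above.
    const provider: OidcProvider = {
        id: 'example',
        name: 'Example Provider',
        clientId: 'client-id',
        issuer: 'https://id.example.com',
        scopes: ['openid', 'email'],
        authorizationRules: [],
    };

    // First call performs discovery (or falls back to manual endpoints) and caches the result.
    const config = await clientConfigService.getOrCreateConfig(provider);

    // Later calls return the cached client.Configuration until the cache is cleared,
    // e.g. after the provider's issuer or endpoints are edited.
    clientConfigService.clearCache(provider.id);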
@@ -0,0 +1,12 @@
import { forwardRef, Module } from '@nestjs/common';

import { OidcClientConfigService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client-config.service.js';
import { OidcRedirectUriService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-redirect-uri.service.js';
import { OidcBaseModule } from '@app/unraid-api/graph/resolvers/sso/core/oidc-base.module.js';

@Module({
    imports: [forwardRef(() => OidcBaseModule)],
    providers: [OidcClientConfigService, OidcRedirectUriService],
    exports: [OidcClientConfigService, OidcRedirectUriService],
})
export class OidcClientModule {}
@@ -0,0 +1,222 @@
|
||||
import { UnauthorizedException } from '@nestjs/common';
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { OidcRedirectUriService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-redirect-uri.service.js';
|
||||
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/oidc-config.service.js';
|
||||
import { validateRedirectUri } from '@app/unraid-api/utils/redirect-uri-validator.js';
|
||||
|
||||
// Mock the redirect URI validator
|
||||
vi.mock('@app/unraid-api/utils/redirect-uri-validator.js', () => ({
|
||||
validateRedirectUri: vi.fn(),
|
||||
}));
|
||||
|
||||
describe('OidcRedirectUriService', () => {
|
||||
let service: OidcRedirectUriService;
|
||||
let oidcConfig: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
OidcRedirectUriService,
|
||||
{
|
||||
provide: OidcConfigPersistence,
|
||||
useValue: {
|
||||
getConfig: vi.fn().mockResolvedValue({
|
||||
providers: [],
|
||||
defaultAllowedOrigins: ['https://allowed.example.com'],
|
||||
}),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<OidcRedirectUriService>(OidcRedirectUriService);
|
||||
oidcConfig = module.get(OidcConfigPersistence);
|
||||
});
|
||||
|
||||
describe('getRedirectUri', () => {
|
||||
it('should return valid redirect URI when validation passes', async () => {
|
||||
const requestOrigin = 'https://example.com';
|
||||
const requestHeaders = {
|
||||
'x-forwarded-proto': 'https',
|
||||
'x-forwarded-host': 'example.com',
|
||||
};
|
||||
|
||||
(validateRedirectUri as any).mockReturnValue({
|
||||
isValid: true,
|
||||
validatedUri: 'https://example.com',
|
||||
});
|
||||
|
||||
const result = await service.getRedirectUri(requestOrigin, requestHeaders);
|
||||
|
||||
expect(result).toBe('https://example.com/graphql/api/auth/oidc/callback');
|
||||
expect(validateRedirectUri).toHaveBeenCalledWith(
|
||||
'https://example.com',
|
||||
'https',
|
||||
'example.com',
|
||||
expect.anything(),
|
||||
['https://allowed.example.com']
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw UnauthorizedException when validation fails', async () => {
|
||||
const requestOrigin = 'https://evil.com';
|
||||
const requestHeaders = {
|
||||
'x-forwarded-proto': 'https',
|
||||
'x-forwarded-host': 'example.com',
|
||||
};
|
||||
|
||||
(validateRedirectUri as any).mockReturnValue({
|
||||
isValid: false,
|
||||
reason: 'Origin not allowed',
|
||||
});
|
||||
|
||||
await expect(service.getRedirectUri(requestOrigin, requestHeaders)).rejects.toThrow(
|
||||
UnauthorizedException
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle missing allowed origins', async () => {
|
||||
oidcConfig.getConfig.mockResolvedValue({
|
||||
providers: [],
|
||||
defaultAllowedOrigins: undefined,
|
||||
});
|
||||
|
||||
const requestOrigin = 'https://example.com';
|
||||
const requestHeaders = {
|
||||
'x-forwarded-proto': 'https',
|
||||
'x-forwarded-host': 'example.com',
|
||||
};
|
||||
|
||||
(validateRedirectUri as any).mockReturnValue({
|
||||
isValid: true,
|
||||
validatedUri: 'https://example.com',
|
||||
});
|
||||
|
||||
const result = await service.getRedirectUri(requestOrigin, requestHeaders);
|
||||
|
||||
expect(result).toBe('https://example.com/graphql/api/auth/oidc/callback');
|
||||
expect(validateRedirectUri).toHaveBeenCalledWith(
|
||||
'https://example.com',
|
||||
'https',
|
||||
'example.com',
|
||||
expect.anything(),
|
||||
undefined
|
||||
);
|
||||
});
|
||||
|
||||
it('should extract protocol from headers correctly', async () => {
|
||||
const requestOrigin = 'https://example.com';
|
||||
const requestHeaders = {
|
||||
'x-forwarded-proto': ['https', 'http'],
|
||||
host: 'example.com',
|
||||
};
|
||||
|
||||
(validateRedirectUri as any).mockReturnValue({
|
||||
isValid: true,
|
||||
validatedUri: 'https://example.com',
|
||||
});
|
||||
|
||||
const result = await service.getRedirectUri(requestOrigin, requestHeaders);
|
||||
|
||||
expect(result).toBe('https://example.com/graphql/api/auth/oidc/callback');
|
||||
expect(validateRedirectUri).toHaveBeenCalledWith(
|
||||
'https://example.com',
|
||||
'https', // Should use first value from array
|
||||
'example.com',
|
||||
expect.anything(),
|
||||
expect.anything()
|
||||
);
|
||||
});
|
||||
|
||||
it('should use host header as fallback', async () => {
|
||||
const requestOrigin = 'https://example.com';
|
||||
const requestHeaders = {
|
||||
host: 'example.com',
|
||||
};
|
||||
|
||||
(validateRedirectUri as any).mockReturnValue({
|
||||
isValid: true,
|
||||
validatedUri: 'https://example.com',
|
||||
});
|
||||
|
||||
const result = await service.getRedirectUri(requestOrigin, requestHeaders);
|
||||
|
||||
expect(result).toBe('https://example.com/graphql/api/auth/oidc/callback');
|
||||
expect(validateRedirectUri).toHaveBeenCalledWith(
|
||||
'https://example.com',
|
||||
'https', // Inferred from requestOrigin when x-forwarded-proto not present
|
||||
'example.com',
|
||||
expect.anything(),
|
||||
expect.anything()
|
||||
);
|
||||
});
|
||||
|
||||
it('should prefer x-forwarded-host over host header', async () => {
|
||||
const requestOrigin = 'https://example.com';
|
||||
const requestHeaders = {
|
||||
'x-forwarded-host': 'forwarded.example.com',
|
||||
host: 'original.example.com',
|
||||
};
|
||||
|
||||
(validateRedirectUri as any).mockReturnValue({
|
||||
isValid: true,
|
||||
validatedUri: 'https://example.com',
|
||||
});
|
||||
|
||||
const result = await service.getRedirectUri(requestOrigin, requestHeaders);
|
||||
|
||||
expect(result).toBe('https://example.com/graphql/api/auth/oidc/callback');
|
||||
expect(validateRedirectUri).toHaveBeenCalledWith(
|
||||
'https://example.com',
|
||||
'https', // Inferred from requestOrigin when x-forwarded-proto not present
|
||||
'forwarded.example.com', // Should use x-forwarded-host
|
||||
expect.anything(),
|
||||
expect.anything()
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw when URL construction fails', async () => {
|
||||
const requestOrigin = 'https://example.com';
|
||||
const requestHeaders = {};
|
||||
|
||||
(validateRedirectUri as any).mockReturnValue({
|
||||
isValid: true,
|
||||
validatedUri: 'invalid-url', // Invalid URL
|
||||
});
|
||||
|
||||
await expect(service.getRedirectUri(requestOrigin, requestHeaders)).rejects.toThrow(
|
||||
UnauthorizedException
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle array values in headers correctly', async () => {
|
||||
const requestOrigin = 'https://example.com';
|
||||
const requestHeaders = {
|
||||
'x-forwarded-proto': ['https'],
|
||||
'x-forwarded-host': ['forwarded.example.com', 'another.example.com'],
|
||||
host: ['original.example.com'],
|
||||
};
|
||||
|
||||
(validateRedirectUri as any).mockReturnValue({
|
||||
isValid: true,
|
||||
validatedUri: 'https://example.com',
|
||||
});
|
||||
|
||||
const result = await service.getRedirectUri(requestOrigin, requestHeaders);
|
||||
|
||||
expect(result).toBe('https://example.com/graphql/api/auth/oidc/callback');
|
||||
expect(validateRedirectUri).toHaveBeenCalledWith(
|
||||
'https://example.com',
|
||||
'https',
|
||||
'forwarded.example.com', // Should use first value from array
|
||||
expect.anything(),
|
||||
expect.anything()
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,97 @@
import { Injectable, Logger, UnauthorizedException } from '@nestjs/common';

import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/oidc-config.service.js';
import { validateRedirectUri } from '@app/unraid-api/utils/redirect-uri-validator.js';

@Injectable()
export class OidcRedirectUriService {
    private readonly logger = new Logger(OidcRedirectUriService.name);
    private readonly CALLBACK_PATH = '/graphql/api/auth/oidc/callback';

    constructor(private readonly oidcConfig: OidcConfigPersistence) {}

    async getRedirectUri(
        requestOrigin: string,
        requestHeaders: Record<string, string | string[] | undefined>
    ): Promise<string> {
        // Extract protocol and host from headers for validation
        const { protocol, host } = this.getRequestOriginInfo(requestHeaders, requestOrigin);

        // Get the global allowed origins from OIDC config
        const config = await this.oidcConfig.getConfig();
        const allowedOrigins = config?.defaultAllowedOrigins;

        // Debug logging to trace the issue
        this.logger.debug(
            `OIDC Config loaded: ${JSON.stringify(config ? { hasConfig: true, allowedOrigins } : { hasConfig: false })}`
        );
        this.logger.debug(
            `Validating redirect URI: ${requestOrigin} against host: ${protocol}://${host}`
        );
        this.logger.debug(`Allowed origins from config: ${JSON.stringify(allowedOrigins || [])}`);

        // Validate the provided requestOrigin using centralized validator
        // Pass the global allowed origins if available
        const validation = validateRedirectUri(
            requestOrigin,
            protocol,
            host,
            this.logger,
            allowedOrigins
        );

        if (!validation.isValid) {
            this.logger.warn(`Invalid redirect_uri in GraphQL OIDC flow: ${validation.reason}`);
            throw new UnauthorizedException(
                `Invalid redirect_uri: ${requestOrigin}. Please add this callback URI to Settings → Management Access → Allowed Redirect URIs`
            );
        }

        // Ensure the validated URI has the correct callback path
        try {
            const url = new URL(validation.validatedUri);
            // Only use origin to prevent path manipulation
            const redirectUri = `${url.origin}${this.CALLBACK_PATH}`;
            this.logger.debug(`Using validated redirect URI: ${redirectUri}`);
            return redirectUri;
        } catch (e) {
            this.logger.error(
                `Failed to construct redirect URI from validated URI: ${validation.validatedUri}`
            );
            throw new UnauthorizedException('Invalid redirect_uri');
        }
    }

    private getRequestOriginInfo(
        requestHeaders: Record<string, string | string[] | undefined>,
        requestOrigin?: string
    ): {
        protocol: string;
        host: string | undefined;
    } {
        // Extract protocol from x-forwarded-proto or infer from requestOrigin, default to http
        const forwardedProto = requestHeaders['x-forwarded-proto'];
        const protocol = forwardedProto
            ? Array.isArray(forwardedProto)
                ? forwardedProto[0]
                : forwardedProto
            : requestOrigin?.startsWith('https')
              ? 'https'
              : 'http';

        // Extract host from x-forwarded-host or host header
        const forwardedHost = requestHeaders['x-forwarded-host'];
        const hostHeader = requestHeaders['host'];
        const host = forwardedHost
            ? Array.isArray(forwardedHost)
                ? forwardedHost[0]
                : forwardedHost
            : hostHeader
              ? Array.isArray(hostHeader)
                  ? hostHeader[0]
                  : hostHeader
              : undefined;

        return { protocol, host };
    }
}
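As a rough sketch of how the service above would be exercised behind a reverse proxy (assuming an injected `redirectUriService` instance and that the validator accepts the origin), the forwarded headers decide which origin the callback is built from:

    // Illustrative only: header names match those read by getRequestOriginInfo above.
    const redirectUri = await redirectUriService.getRedirectUri('https://unraid.local:8443', {
        'x-forwarded-proto': 'https',
        'x-forwarded-host': 'unraid.local:8443',
    });
    // => 'https://unraid.local:8443/graphql/api/auth/oidc/callback' when validation passes;
    // an UnauthorizedException is thrown when the origin is not in the allowed list.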
@@ -0,0 +1,14 @@
import { forwardRef, Module } from '@nestjs/common';

import { UserSettingsModule } from '@unraid/shared/services/user-settings.js';

import { OidcClientModule } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client.module.js';
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/oidc-config.service.js';
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';

@Module({
    imports: [UserSettingsModule, forwardRef(() => OidcClientModule)],
    providers: [OidcConfigPersistence, OidcValidationService],
    exports: [OidcConfigPersistence, OidcValidationService],
})
export class OidcBaseModule {}
@@ -0,0 +1,276 @@
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { Test } from '@nestjs/testing';
|
||||
import * as fs from 'fs/promises';
|
||||
|
||||
import { UserSettingsService } from '@unraid/shared/services/user-settings.js';
|
||||
import * as client from 'openid-client';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { OidcClientConfigService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client-config.service.js';
|
||||
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/oidc-config.service.js';
|
||||
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
|
||||
import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';
|
||||
|
||||
vi.mock('openid-client');
|
||||
vi.mock('fs/promises', () => ({
|
||||
writeFile: vi.fn().mockResolvedValue(undefined),
|
||||
mkdir: vi.fn().mockResolvedValue(undefined),
|
||||
stat: vi.fn().mockRejectedValue(new Error('File not found')),
|
||||
}));
|
||||
|
||||
describe('OIDC Config Cache Fix - Integration Test', () => {
|
||||
let configPersistence: OidcConfigPersistence;
|
||||
let clientConfigService: OidcClientConfigService;
|
||||
let mockConfigService: any;
|
||||
|
||||
afterEach(() => {
|
||||
delete process.env.PATHS_CONFIG;
|
||||
});
|
||||
|
||||
const createMockProvider = (port: number): OidcProvider => ({
|
||||
id: 'test-provider',
|
||||
name: 'Test Provider',
|
||||
clientId: 'test-client-id',
|
||||
clientSecret: 'test-secret',
|
||||
issuer: `http://localhost:${port}`,
|
||||
scopes: ['openid', 'profile', 'email'],
|
||||
authorizationRules: [
|
||||
{
|
||||
claim: 'email',
|
||||
operator: 'endsWith' as any,
|
||||
value: ['@example.com'],
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const createMockConfiguration = (port: number) => {
|
||||
const mockConfig = {
|
||||
serverMetadata: vi.fn(() => ({
|
||||
issuer: `http://localhost:${port}`,
|
||||
authorization_endpoint: `http://localhost:${port}/auth`,
|
||||
token_endpoint: `http://localhost:${port}/token`,
|
||||
jwks_uri: `http://localhost:${port}/jwks`,
|
||||
userinfo_endpoint: `http://localhost:${port}/userinfo`,
|
||||
})),
|
||||
};
|
||||
return mockConfig as unknown as client.Configuration;
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
// Set environment variable for config path
|
||||
process.env.PATHS_CONFIG = '/tmp/test-config';
|
||||
|
||||
mockConfigService = {
|
||||
get: vi.fn((key: string) => {
|
||||
if (key === 'oidc') {
|
||||
return {
|
||||
providers: [createMockProvider(1029)],
|
||||
defaultAllowedOrigins: [],
|
||||
};
|
||||
}
|
||||
if (key === 'paths.config') {
|
||||
return '/tmp/test-config';
|
||||
}
|
||||
return undefined;
|
||||
}),
|
||||
set: vi.fn(),
|
||||
getOrThrow: vi.fn((key: string) => {
|
||||
if (key === 'paths.config' || key === 'paths') {
|
||||
return '/tmp/test-config';
|
||||
}
|
||||
return '/tmp/test-config';
|
||||
}),
|
||||
};
|
||||
|
||||
const mockUserSettingsService = {
|
||||
register: vi.fn(),
|
||||
getAllSettings: vi.fn(),
|
||||
getAllValues: vi.fn(),
|
||||
updateNamespacedValues: vi.fn(),
|
||||
};
|
||||
|
||||
const module = await Test.createTestingModule({
|
||||
providers: [
|
||||
OidcConfigPersistence,
|
||||
OidcClientConfigService,
|
||||
OidcValidationService,
|
||||
{
|
||||
provide: ConfigService,
|
||||
useValue: mockConfigService,
|
||||
},
|
||||
{
|
||||
provide: UserSettingsService,
|
||||
useValue: mockUserSettingsService,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
configPersistence = module.get<OidcConfigPersistence>(OidcConfigPersistence);
|
||||
clientConfigService = module.get<OidcClientConfigService>(OidcClientConfigService);
|
||||
|
||||
// Mock the persist method since we don't want to write to disk in tests
|
||||
vi.spyOn(configPersistence as any, 'persist').mockResolvedValue(undefined);
|
||||
});
|
||||
|
||||
describe('Cache clearing on provider update', () => {
|
||||
it('should clear cache when provider is updated via upsertProvider', async () => {
|
||||
const provider1029 = createMockProvider(1029);
|
||||
const provider1030 = createMockProvider(1030);
|
||||
const mockConfig1029 = createMockConfiguration(1029);
|
||||
const mockConfig1030 = createMockConfiguration(1030);
|
||||
|
||||
// Mock validation service to return configs
|
||||
const validationService = (configPersistence as any).validationService;
|
||||
vi.spyOn(validationService, 'performDiscovery')
|
||||
.mockResolvedValueOnce(mockConfig1029)
|
||||
.mockResolvedValueOnce(mockConfig1030);
|
||||
|
||||
// First, get config for port 1029 - this caches it
|
||||
const config1 = await clientConfigService.getOrCreateConfig(provider1029);
|
||||
expect(config1.serverMetadata().issuer).toBe('http://localhost:1029');
|
||||
|
||||
// Spy on clearCache method
|
||||
const clearCacheSpy = vi.spyOn(clientConfigService, 'clearCache');
|
||||
|
||||
// Update the provider to port 1030 via upsertProvider
|
||||
await configPersistence.upsertProvider(provider1030);
|
||||
|
||||
// Verify cache was cleared for this specific provider
|
||||
expect(clearCacheSpy).toHaveBeenCalledWith(provider1030.id);
|
||||
|
||||
// Now get config again - should fetch fresh config for port 1030
|
||||
const config2 = await clientConfigService.getOrCreateConfig(provider1030);
|
||||
expect(config2.serverMetadata().issuer).toBe('http://localhost:1030');
|
||||
expect(config2.serverMetadata().authorization_endpoint).toBe('http://localhost:1030/auth');
|
||||
|
||||
// Verify discovery was called twice (not using cache)
|
||||
expect(validationService.performDiscovery).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('should clear cache when provider is deleted', async () => {
|
||||
const provider = createMockProvider(1029);
|
||||
const mockConfig = createMockConfiguration(1029);
|
||||
|
||||
// Setup initial provider in config
|
||||
mockConfigService.get.mockReturnValue({
|
||||
providers: [provider, { ...provider, id: 'other-provider' }],
|
||||
defaultAllowedOrigins: [],
|
||||
});
|
||||
|
||||
// Mock validation service
|
||||
const validationService = (configPersistence as any).validationService;
|
||||
vi.spyOn(validationService, 'performDiscovery').mockResolvedValue(mockConfig);
|
||||
|
||||
// First, cache the provider config
|
||||
await clientConfigService.getOrCreateConfig(provider);
|
||||
|
||||
// Spy on clearCache
|
||||
const clearCacheSpy = vi.spyOn(clientConfigService, 'clearCache');
|
||||
|
||||
// Delete the provider
|
||||
const deleted = await configPersistence.deleteProvider(provider.id);
|
||||
expect(deleted).toBe(true);
|
||||
|
||||
// Verify cache was cleared for the deleted provider
|
||||
expect(clearCacheSpy).toHaveBeenCalledWith(provider.id);
|
||||
});
|
||||
|
||||
it('should clear all provider caches when updated via settings updateValues', async () => {
|
||||
// This simulates what happens when settings are saved through the UI
|
||||
const settingsCallback = (configPersistence as any).userSettings.register.mock.calls[0][1];
|
||||
|
||||
const newConfig = {
|
||||
providers: [
|
||||
{
|
||||
...createMockProvider(1030),
|
||||
authorizationMode: 'simple',
|
||||
simpleAuthorization: {
|
||||
allowedDomains: ['example.com'],
|
||||
allowedEmails: [],
|
||||
allowedUserIds: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
defaultAllowedOrigins: [],
|
||||
};
|
||||
|
||||
// Spy on clearCache
|
||||
const clearCacheSpy = vi.spyOn(clientConfigService, 'clearCache');
|
||||
|
||||
// Mock validation
|
||||
const validationService = (configPersistence as any).validationService;
|
||||
vi.spyOn(validationService, 'validateProvider').mockResolvedValue({
|
||||
isValid: true,
|
||||
});
|
||||
|
||||
// Call the updateValues function (simulating saving settings from UI)
|
||||
await settingsCallback.updateValues(newConfig);
|
||||
|
||||
// Verify cache was cleared (called without arguments to clear all)
|
||||
expect(clearCacheSpy).toHaveBeenCalledWith();
|
||||
});
|
||||
|
||||
it('should NOT require API restart after updating provider issuer', async () => {
|
||||
// This test confirms that the fix eliminates the need for API restart
|
||||
const settingsCallback = (configPersistence as any).userSettings.register.mock.calls[0][1];
|
||||
|
||||
const newConfig = {
|
||||
providers: [createMockProvider(1030)],
|
||||
defaultAllowedOrigins: [],
|
||||
};
|
||||
|
||||
// Mock validation
|
||||
const validationService = (configPersistence as any).validationService;
|
||||
vi.spyOn(validationService, 'validateProvider').mockResolvedValue({
|
||||
isValid: true,
|
||||
});
|
||||
|
||||
// Update settings
|
||||
const result = await settingsCallback.updateValues(newConfig);
|
||||
|
||||
// Verify that restartRequired is false
|
||||
expect(result.restartRequired).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Provider validation on save', () => {
|
||||
it('should validate providers and include warnings but still save', async () => {
|
||||
const settingsCallback = (configPersistence as any).userSettings.register.mock.calls[0][1];
|
||||
|
||||
const newConfig = {
|
||||
providers: [
|
||||
createMockProvider(1030),
|
||||
{ ...createMockProvider(1031), id: 'invalid-provider', name: 'Invalid Provider' },
|
||||
],
|
||||
defaultAllowedOrigins: [],
|
||||
};
|
||||
|
||||
// Mock validation - first provider valid, second invalid
|
||||
const validationService = (configPersistence as any).validationService;
|
||||
vi.spyOn(validationService, 'validateProvider')
|
||||
.mockResolvedValueOnce({ isValid: true })
|
||||
.mockResolvedValueOnce({
|
||||
isValid: false,
|
||||
error: 'Discovery failed: Unable to reach issuer',
|
||||
});
|
||||
|
||||
// Update settings
|
||||
const result = await settingsCallback.updateValues(newConfig);
|
||||
|
||||
// Should save successfully but include warnings
|
||||
expect(result.restartRequired).toBe(false);
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings).toContain(
|
||||
'❌ Invalid Provider: Discovery failed: Unable to reach issuer'
|
||||
);
|
||||
expect(result.values.providers).toHaveLength(2);
|
||||
|
||||
// Cache should still be cleared even with validation warnings
|
||||
const clearCacheSpy = vi.spyOn(clientConfigService, 'clearCache');
|
||||
await settingsCallback.updateValues(newConfig);
|
||||
expect(clearCacheSpy).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,87 @@
import { ConfigService } from '@nestjs/config';
import { Test, TestingModule } from '@nestjs/testing';

import { UserSettingsService } from '@unraid/shared/services/user-settings.js';
import { beforeEach, describe, expect, it, vi } from 'vitest';

import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/oidc-config.service.js';
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
import { OidcUrlPatterns } from '@app/unraid-api/graph/resolvers/sso/utils/oidc-url-patterns.util.js';

describe('OidcConfigPersistence', () => {
    let service: OidcConfigPersistence;
    let mockConfigService: ConfigService;
    let mockUserSettingsService: UserSettingsService;
    let mockValidationService: OidcValidationService;

    beforeEach(async () => {
        const module: TestingModule = await Test.createTestingModule({
            providers: [
                OidcConfigPersistence,
                {
                    provide: ConfigService,
                    useValue: {
                        get: vi.fn(),
                        set: vi.fn(),
                    },
                },
                {
                    provide: UserSettingsService,
                    useValue: {
                        register: vi.fn(),
                    },
                },
                {
                    provide: OidcValidationService,
                    useValue: {
                        validateProvider: vi.fn(),
                    },
                },
            ],
        }).compile();

        service = module.get<OidcConfigPersistence>(OidcConfigPersistence);
        mockConfigService = module.get<ConfigService>(ConfigService);
        mockUserSettingsService = module.get<UserSettingsService>(UserSettingsService);
        mockValidationService = module.get<OidcValidationService>(OidcValidationService);

        // Mock persist method to avoid file system operations
        vi.spyOn(service, 'persist').mockResolvedValue(true);
    });

    describe('URL validation integration', () => {
        it('should validate issuer URLs using the shared utility', () => {
            // Test that our shared utility correctly validates URLs
            // This ensures the pattern we use in the form schema works correctly
            const examples = OidcUrlPatterns.getExamples();

            // Test valid URLs
            examples.valid.forEach((url) => {
                expect(OidcUrlPatterns.isValidIssuerUrl(url)).toBe(true);
            });

            // Test invalid URLs
            examples.invalid.forEach((url) => {
                expect(OidcUrlPatterns.isValidIssuerUrl(url)).toBe(false);
            });
        });

        it('should validate the pattern constant matches the regex', () => {
            // Ensure the pattern string can be compiled into a valid regex
            expect(() => new RegExp(OidcUrlPatterns.ISSUER_URL_PATTERN)).not.toThrow();

            // Ensure the static regex matches the pattern
            const manualRegex = new RegExp(OidcUrlPatterns.ISSUER_URL_PATTERN);
            expect(OidcUrlPatterns.ISSUER_URL_REGEX.source).toBe(manualRegex.source);
        });

        it('should reject the specific URL from the bug report', () => {
            // Test the exact scenario that caused the original bug
            const problematicUrl = 'https://accounts.google.com/';
            const correctUrl = 'https://accounts.google.com';

            expect(OidcUrlPatterns.isValidIssuerUrl(problematicUrl)).toBe(false);
            expect(OidcUrlPatterns.isValidIssuerUrl(correctUrl)).toBe(true);
        });
    });
});
@@ -1,4 +1,4 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { forwardRef, Inject, Injectable, Optional } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
|
||||
import { RuleEffect } from '@jsonforms/core';
|
||||
@@ -6,12 +6,14 @@ import { mergeSettingSlices } from '@unraid/shared/jsonforms/settings.js';
|
||||
import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';
|
||||
import { UserSettingsService } from '@unraid/shared/services/user-settings.js';
|
||||
|
||||
import { OidcClientConfigService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client-config.service.js';
|
||||
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
|
||||
import {
|
||||
AuthorizationOperator,
|
||||
OidcAuthorizationRule,
|
||||
OidcProvider,
|
||||
} from '@app/unraid-api/graph/resolvers/sso/oidc-provider.model.js';
|
||||
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/oidc-validation.service.js';
|
||||
} from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';
|
||||
import { OidcUrlPatterns } from '@app/unraid-api/graph/resolvers/sso/utils/oidc-url-patterns.util.js';
|
||||
import {
|
||||
createAccordionLayout,
|
||||
createLabeledControl,
|
||||
@@ -21,6 +23,7 @@ import { SettingSlice } from '@app/unraid-api/types/json-forms.js';
|
||||
|
||||
export interface OidcConfig {
|
||||
providers: OidcProvider[];
|
||||
defaultAllowedOrigins?: string[];
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
@@ -28,7 +31,10 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
constructor(
|
||||
configService: ConfigService,
|
||||
private readonly userSettings: UserSettingsService,
|
||||
private readonly validationService: OidcValidationService
|
||||
private readonly validationService: OidcValidationService,
|
||||
@Optional()
|
||||
@Inject(forwardRef(() => OidcClientConfigService))
|
||||
private readonly clientConfigService?: OidcClientConfigService
|
||||
) {
|
||||
super(configService);
|
||||
this.registerSettings();
|
||||
@@ -52,6 +58,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
defaultConfig(): OidcConfig {
|
||||
return {
|
||||
providers: [this.getUnraidNetSsoProvider()],
|
||||
defaultAllowedOrigins: [],
|
||||
};
|
||||
}
|
||||
|
||||
@@ -93,6 +100,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
|
||||
return {
|
||||
providers: [unraidNetSsoProvider],
|
||||
defaultAllowedOrigins: [],
|
||||
};
|
||||
}
|
||||
|
||||
@@ -119,6 +127,42 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
provider.authorizationRules || currentDefaults.authorizationRules,
|
||||
};
|
||||
}
|
||||
|
||||
// Fix dangerous authorization rules for non-unraid.net providers
|
||||
if (provider.authorizationRules && provider.authorizationRules.length > 0) {
|
||||
// Filter out dangerous rules that would allow all emails
|
||||
const safeRules = provider.authorizationRules.filter((rule) => {
|
||||
// Remove rules that have "email endsWith @" which allows all emails
|
||||
if (
|
||||
rule.claim === 'email' &&
|
||||
rule.operator === AuthorizationOperator.ENDS_WITH &&
|
||||
rule.value &&
|
||||
rule.value.length === 1 &&
|
||||
rule.value[0] === '@'
|
||||
) {
|
||||
this.logger.warn(
|
||||
`Removing dangerous authorization rule from provider "${provider.name}": email endsWith "@" allows all emails`
|
||||
);
|
||||
return false;
|
||||
}
|
||||
// Remove rules with empty or invalid values
|
||||
if (
|
||||
!rule.value ||
|
||||
rule.value.length === 0 ||
|
||||
rule.value.every((v) => !v || !v.trim())
|
||||
) {
|
||||
this.logger.warn(
|
||||
`Removing invalid authorization rule from provider "${provider.name}": empty values`
|
||||
);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
// Update provider with safe rules
|
||||
provider.authorizationRules = safeRules;
|
||||
}
|
||||
|
||||
return provider;
|
||||
});
|
||||
|
||||
@@ -155,6 +199,34 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
provider.authorizationRules = rules;
|
||||
}
|
||||
|
||||
// Skip providers without authorization rules (they will be ignored)
|
||||
if (!provider.authorizationRules || provider.authorizationRules.length === 0) {
|
||||
this.logger.warn(
|
||||
`Provider "${provider.name}" has no authorization rules and will be ignored. Configure authorization rules to enable this provider.`
|
||||
);
|
||||
}
|
||||
|
||||
// Validate each rule has valid values (only if rules exist)
|
||||
if (provider.authorizationRules && provider.authorizationRules.length > 0) {
|
||||
for (const rule of provider.authorizationRules) {
|
||||
if (!rule.claim || !rule.claim.trim()) {
|
||||
throw new Error(
|
||||
`Provider "${provider.name}": Authorization rule claim cannot be empty`
|
||||
);
|
||||
}
|
||||
if (!rule.operator) {
|
||||
throw new Error(
|
||||
`Provider "${provider.name}": Authorization rule operator is required`
|
||||
);
|
||||
}
|
||||
if (!rule.value || rule.value.length === 0 || rule.value.every((v) => !v || !v.trim())) {
|
||||
throw new Error(
|
||||
`Provider "${provider.name}": Authorization rule for claim "${rule.claim}" must have at least one non-empty value`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up the provider object - remove UI-only fields
|
||||
const cleanedProvider: OidcProvider = {
|
||||
id: provider.id,
|
||||
@@ -184,6 +256,15 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
this.configService.set(this.configKey(), newConfig);
|
||||
await this.persist(newConfig);
|
||||
|
||||
// Clear the OIDC client configuration cache when a provider is updated
|
||||
// This ensures the new issuer/endpoints are used immediately
|
||||
if (this.clientConfigService) {
|
||||
this.clientConfigService.clearCache(cleanedProvider.id);
|
||||
this.logger.debug(
|
||||
`Cleared OIDC client configuration cache for provider ${cleanedProvider.id}`
|
||||
);
|
||||
}
|
||||
|
||||
return cleanedProvider;
|
||||
}
|
||||
|
||||
@@ -191,46 +272,52 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
allowedDomains?: string[];
|
||||
allowedEmails?: string[];
|
||||
allowedUserIds?: string[];
|
||||
googleWorkspaceDomain?: string;
|
||||
}): OidcAuthorizationRule[] {
|
||||
const rules: OidcAuthorizationRule[] = [];
|
||||
|
||||
// Convert email domains to endsWith rules
|
||||
// Only add if domains are provided AND not empty AND have non-empty values
|
||||
if (simpleAuth?.allowedDomains && simpleAuth.allowedDomains.length > 0) {
|
||||
rules.push({
|
||||
claim: 'email',
|
||||
operator: AuthorizationOperator.ENDS_WITH,
|
||||
value: simpleAuth.allowedDomains.map((domain: string) =>
|
||||
domain.startsWith('@') ? domain : `@${domain}`
|
||||
),
|
||||
});
|
||||
const validDomains = simpleAuth.allowedDomains.filter(
|
||||
(domain: string) => domain && domain.trim()
|
||||
);
|
||||
if (validDomains.length > 0) {
|
||||
rules.push({
|
||||
claim: 'email',
|
||||
operator: AuthorizationOperator.ENDS_WITH,
|
||||
value: validDomains.map((domain: string) =>
|
||||
domain.startsWith('@') ? domain : `@${domain}`
|
||||
),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Convert specific emails to equals rules
|
||||
// Only add if emails are provided AND not empty AND have non-empty values
|
||||
if (simpleAuth?.allowedEmails && simpleAuth.allowedEmails.length > 0) {
|
||||
rules.push({
|
||||
claim: 'email',
|
||||
operator: AuthorizationOperator.EQUALS,
|
||||
value: simpleAuth.allowedEmails,
|
||||
});
|
||||
const validEmails = simpleAuth.allowedEmails.filter(
|
||||
(email: string) => email && email.trim()
|
||||
);
|
||||
if (validEmails.length > 0) {
|
||||
rules.push({
|
||||
claim: 'email',
|
||||
operator: AuthorizationOperator.EQUALS,
|
||||
value: validEmails,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Convert user IDs to sub equals rules
|
||||
// Only add if user IDs are provided AND not empty AND have non-empty values
|
||||
if (simpleAuth?.allowedUserIds && simpleAuth.allowedUserIds.length > 0) {
|
||||
rules.push({
|
||||
claim: 'sub',
|
||||
operator: AuthorizationOperator.EQUALS,
|
||||
value: simpleAuth.allowedUserIds,
|
||||
});
|
||||
}
|
||||
|
||||
// Google Workspace domain (hd claim)
|
||||
if (simpleAuth?.googleWorkspaceDomain) {
|
||||
rules.push({
|
||||
claim: 'hd',
|
||||
operator: AuthorizationOperator.EQUALS,
|
||||
value: [simpleAuth.googleWorkspaceDomain],
|
||||
});
|
||||
const validUserIds = simpleAuth.allowedUserIds.filter((id: string) => id && id.trim());
|
||||
if (validUserIds.length > 0) {
|
||||
rules.push({
|
||||
claim: 'sub',
|
||||
operator: AuthorizationOperator.EQUALS,
|
||||
value: validUserIds,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return rules;
|
||||
@@ -254,6 +341,12 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
this.configService.set(this.configKey(), newConfig);
|
||||
await this.persist(newConfig);
|
||||
|
||||
// Clear the cache for the deleted provider
|
||||
if (this.clientConfigService) {
|
||||
this.clientConfigService.clearCache(id);
|
||||
this.logger.debug(`Cleared OIDC client configuration cache for deleted provider ${id}`);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -286,7 +379,6 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
allowedDomains?: string[];
|
||||
allowedEmails?: string[];
|
||||
allowedUserIds?: string[];
|
||||
googleWorkspaceDomain?: string;
|
||||
}
|
||||
);
|
||||
// Return provider with generated rules, removing UI-only fields
|
||||
@@ -304,6 +396,39 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
}),
|
||||
};
|
||||
|
||||
// Validate authorization rules for providers that have them
|
||||
for (const provider of processedConfig.providers) {
|
||||
if (!provider.authorizationRules || provider.authorizationRules.length === 0) {
|
||||
this.logger.warn(
|
||||
`Provider "${provider.name}" has no authorization rules and will be ignored. Configure authorization rules to enable this provider.`
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Validate each rule has valid values
|
||||
for (const rule of provider.authorizationRules) {
|
||||
if (!rule.claim || !rule.claim.trim()) {
|
||||
throw new Error(
|
||||
`Provider "${provider.name}": Authorization rule claim cannot be empty`
|
||||
);
|
||||
}
|
||||
if (!rule.operator) {
|
||||
throw new Error(
|
||||
`Provider "${provider.name}": Authorization rule operator is required`
|
||||
);
|
||||
}
|
||||
if (
|
||||
!rule.value ||
|
||||
rule.value.length === 0 ||
|
||||
rule.value.every((v) => !v || !v.trim())
|
||||
) {
|
||||
throw new Error(
|
||||
`Provider "${provider.name}": Authorization rule for claim "${rule.claim}" must have at least one non-empty value`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Validate OIDC discovery for all providers with issuer URLs
|
||||
const validationErrors: string[] = [];
|
||||
for (const provider of processedConfig.providers) {
|
||||
@@ -334,6 +459,13 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
this.configService.set(this.configKey(), processedConfig);
|
||||
await this.persist(processedConfig);
|
||||
|
||||
// Clear the OIDC client configuration cache to ensure fresh discovery
|
||||
// This fixes the issue where changing issuer URLs requires API restart
|
||||
if (this.clientConfigService) {
|
||||
this.clientConfigService.clearCache();
|
||||
this.logger.debug('Cleared OIDC client configuration cache after provider update');
|
||||
}
|
||||
|
||||
// Include validation results in response
|
||||
const response: { restartRequired: boolean; values: OidcConfig; warnings?: string[] } = {
|
||||
restartRequired: false,
|
||||
@@ -419,10 +551,6 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
if (rule.claim === 'sub' && rule.operator === AuthorizationOperator.EQUALS) {
|
||||
return true;
|
||||
}
|
||||
// Google Workspace domain
|
||||
if (rule.claim === 'hd' && rule.operator === AuthorizationOperator.EQUALS) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
}
|
||||
@@ -431,13 +559,11 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
allowedDomains: string[];
|
||||
allowedEmails: string[];
|
||||
allowedUserIds: string[];
|
||||
googleWorkspaceDomain?: string;
|
||||
} {
|
||||
const simpleAuth = {
|
||||
allowedDomains: [] as string[],
|
||||
allowedEmails: [] as string[],
|
||||
allowedUserIds: [] as string[],
|
||||
googleWorkspaceDomain: undefined as string | undefined,
|
||||
};
|
||||
|
||||
rules.forEach((rule) => {
|
||||
@@ -449,12 +575,6 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
simpleAuth.allowedEmails = rule.value;
|
||||
} else if (rule.claim === 'sub' && rule.operator === AuthorizationOperator.EQUALS) {
|
||||
simpleAuth.allowedUserIds = rule.value;
|
||||
} else if (
|
||||
rule.claim === 'hd' &&
|
||||
rule.operator === AuthorizationOperator.EQUALS &&
|
||||
rule.value.length > 0
|
||||
) {
|
||||
simpleAuth.googleWorkspaceDomain = rule.value[0];
|
||||
}
|
||||
});
|
||||
|
||||
@@ -462,7 +582,36 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
}
|
||||
|
||||
private buildSlice(): SettingSlice {
|
||||
return mergeSettingSlices([this.oidcProvidersSlice()], { as: 'sso' });
|
||||
const providersSlice = this.oidcProvidersSlice();
|
||||
|
||||
// Add defaultAllowedOrigins to the properties
|
||||
providersSlice.properties.defaultAllowedOrigins = {
|
||||
type: 'array',
|
||||
items: { type: 'string' },
|
||||
title: 'Default Allowed Redirect Origins',
|
||||
default: [],
|
||||
description:
|
||||
'Additional trusted redirect origins to allow redirects from custom ports, reverse proxies, Tailscale, etc.',
|
||||
};
|
||||
|
||||
// Add the control for defaultAllowedOrigins before the providers control using UnraidSettingsLayout
|
||||
if (providersSlice.elements?.[0]?.elements) {
|
||||
providersSlice.elements[0].elements.unshift(
|
||||
createLabeledControl({
|
||||
scope: '#/properties/sso/properties/defaultAllowedOrigins',
|
||||
label: 'Allowed OIDC Redirect Origins',
|
||||
description:
|
||||
'Add trusted origins for OIDC redirection. These are URLs that the OIDC provider can redirect to after authentication when accessing Unraid through custom ports, reverse proxies, or Tailscale. Each origin should include the protocol and optionally a port (e.g., https://unraid.local:8443)',
|
||||
controlOptions: {
|
||||
format: 'array',
|
||||
inputType: 'text',
|
||||
placeholder: 'https://unraid.local:8443',
|
||||
},
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
return mergeSettingSlices([providersSlice], { as: 'sso' });
|
||||
}
|
||||
|
||||
private oidcProvidersSlice(): SettingSlice {
|
||||
@@ -498,7 +647,22 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
type: 'string',
|
||||
title: 'Issuer URL',
|
||||
format: 'uri',
|
||||
description: 'OIDC issuer URL (e.g., https://accounts.google.com)',
|
||||
allOf: [
|
||||
{
|
||||
pattern: OidcUrlPatterns.ISSUER_URL_PATTERN,
|
||||
errorMessage:
|
||||
'Must be a valid HTTP or HTTPS URL without trailing slashes or whitespace',
|
||||
},
|
||||
{
|
||||
not: {
|
||||
pattern: '\\.well-known',
|
||||
},
|
||||
errorMessage:
|
||||
'Cannot contain /.well-known/ paths. Use the base issuer URL instead (e.g., https://accounts.google.com instead of https://accounts.google.com/.well-known/openid-configuration)',
|
||||
},
|
||||
],
|
||||
description:
|
||||
'OIDC issuer URL (e.g., https://accounts.google.com). Cannot contain /.well-known/ paths - use the base issuer URL instead of the full discovery endpoint. Must not end with a trailing slash.',
|
||||
},
|
||||
authorizationEndpoint: {
|
||||
anyOf: [
|
||||
@@ -999,7 +1163,7 @@ export class OidcConfigPersistence extends ConfigFilePersister<OidcConfig> {
|
||||
scope: '#/properties/claim',
|
||||
label: 'JWT Claim:',
|
||||
description:
|
||||
'JWT claim to check (e.g., email, sub, groups, hd for Google hosted domain)',
|
||||
'JWT claim to check (e.g., email, sub, groups)',
|
||||
controlOptions: {
|
||||
inputType: 'text',
|
||||
placeholder: 'email',
|
||||
@@ -0,0 +1,14 @@
import { Module } from '@nestjs/common';

import { OidcAuthModule } from '@app/unraid-api/graph/resolvers/sso/auth/oidc-auth.module.js';
import { OidcClientModule } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client.module.js';
import { OidcBaseModule } from '@app/unraid-api/graph/resolvers/sso/core/oidc-base.module.js';
import { OidcService } from '@app/unraid-api/graph/resolvers/sso/core/oidc.service.js';
import { OidcSessionModule } from '@app/unraid-api/graph/resolvers/sso/session/oidc-session.module.js';

@Module({
    imports: [OidcBaseModule, OidcSessionModule, OidcAuthModule, OidcClientModule],
    providers: [OidcService],
    exports: [OidcService, OidcBaseModule, OidcSessionModule, OidcAuthModule, OidcClientModule],
})
export class OidcCoreModule {}
@@ -0,0 +1,160 @@
import { Injectable, Logger } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';

import * as client from 'openid-client';

import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';
import { OidcErrorHelper } from '@app/unraid-api/graph/resolvers/sso/utils/oidc-error.helper.js';

@Injectable()
export class OidcValidationService {
    private readonly logger = new Logger(OidcValidationService.name);

    constructor(private readonly configService: ConfigService) {}

    /**
     * Validate OIDC provider configuration by attempting discovery
     * Returns validation result with helpful error messages for debugging
     */
    async validateProvider(
        provider: OidcProvider
    ): Promise<{ isValid: boolean; error?: string; details?: unknown }> {
        try {
            // Validate issuer URL is present
            if (!provider.issuer) {
                return {
                    isValid: false,
                    error: 'No issuer URL provided. Please specify the OIDC provider issuer URL.',
                    details: { type: 'MISSING_ISSUER' },
                };
            }

            // Validate issuer URL is valid
            let serverUrl: URL;
            try {
                serverUrl = new URL(provider.issuer);
            } catch (urlError) {
                return {
                    isValid: false,
                    error: `Invalid issuer URL format: '${provider.issuer}'. Please provide a valid URL.`,
                    details: {
                        type: 'INVALID_URL',
                        originalError: urlError instanceof Error ? urlError.message : String(urlError),
                    },
                };
            }

            // Configure client options for HTTP if needed
            let clientOptions: any = undefined;
            if (serverUrl.protocol === 'http:') {
                this.logger.warn(
                    `HTTP issuer URL detected for provider ${provider.id}: ${provider.issuer} - This is insecure`
                );
                clientOptions = {
                    execute: [client.allowInsecureRequests],
                };
            }

            // Attempt OIDC discovery
            await this.performDiscovery(provider, clientOptions);
            return { isValid: true };
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : 'Unknown error';

            // Log the raw error for debugging
            this.logger.log(`Raw discovery error for ${provider.id}: ${errorMessage}`);

            // Use the helper to parse the error
            const { userFriendlyError, details } = OidcErrorHelper.parseDiscoveryError(
                error,
                provider.issuer
            );

            this.logger.error(`Validation failed for provider ${provider.id}: ${errorMessage}`);

            // Add debug logging for HTTP status errors
            if (errorMessage.includes('unexpected HTTP response status code')) {
                const baseUrl = provider.issuer?.endsWith('/.well-known/openid-configuration')
                    ? provider.issuer.replace('/.well-known/openid-configuration', '')
                    : provider.issuer;
                this.logger.log(`Attempted to fetch: ${baseUrl}/.well-known/openid-configuration`);
                this.logger.error(`Full error details: ${errorMessage}`);
            }

            return {
                isValid: false,
                error: userFriendlyError,
                details,
            };
        }
    }

    async performDiscovery(provider: OidcProvider, clientOptions?: any): Promise<client.Configuration> {
        if (!provider.issuer) {
            throw new Error('No issuer URL provided');
        }

        // Configure client auth method
        const clientAuth = provider.clientSecret
            ? client.ClientSecretPost(provider.clientSecret)
            : undefined;

        const serverUrl = new URL(provider.issuer);
        const discoveryUrl = `${provider.issuer}/.well-known/openid-configuration`;

        this.logger.log(`Starting discovery for provider ${provider.id}`);
        this.logger.log(`Discovery URL: ${discoveryUrl}`);
        this.logger.log(`Client ID: ${provider.clientId}`);
        this.logger.log(`Client secret configured: ${provider.clientSecret ? 'Yes' : 'No'}`);

        // Use provided client options or create default options with HTTP support if needed
        if (!clientOptions && serverUrl.protocol === 'http:') {
            this.logger.warn(
                `Allowing HTTP for ${provider.id} - This is insecure and should only be used for testing`
            );
            // For openid-client v6, use allowInsecureRequests in the execute array
            // This is deprecated but needed for local development with HTTP endpoints
            clientOptions = {
                execute: [client.allowInsecureRequests],
            };
        }

        try {
            const config = await client.discovery(
                serverUrl,
                provider.clientId,
                undefined, // client metadata
                clientAuth,
                clientOptions
            );

            this.logger.log(`Discovery successful for ${provider.id}`);
            this.logger.log(`Discovery response metadata:`);
            this.logger.log(`  - issuer: ${config.serverMetadata().issuer}`);
            this.logger.log(
                `  - authorization_endpoint: ${config.serverMetadata().authorization_endpoint}`
            );
            this.logger.log(`  - token_endpoint: ${config.serverMetadata().token_endpoint}`);
            this.logger.log(
                `  - userinfo_endpoint: ${config.serverMetadata().userinfo_endpoint || 'not provided'}`
            );
            this.logger.log(`  - jwks_uri: ${config.serverMetadata().jwks_uri || 'not provided'}`);
            this.logger.log(
                `  - response_types_supported: ${config.serverMetadata().response_types_supported?.join(', ') || 'not provided'}`
            );
            this.logger.log(
                `  - scopes_supported: ${config.serverMetadata().scopes_supported?.join(', ') || 'not provided'}`
            );

            return config;
        } catch (discoveryError) {
            this.logger.error(`Discovery failed for ${provider.id} at ${discoveryUrl}`);

            if (discoveryError instanceof Error) {
                this.logger.error('Discovery error: %o', discoveryError);
            }

            throw discoveryError;
        }
    }
}
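A hedged sketch of how validateProvider surfaces failures, assuming an injected OidcValidationService instance; the provider values below are placeholders and not configuration taken from the repository:

    // Illustrative call; a misconfigured issuer yields isValid: false plus a user-friendly error.
    const result = await validationService.validateProvider({
        id: 'example',
        name: 'Example Provider',
        clientId: 'client-id',
        issuer: 'https://accounts.google.com',
        scopes: ['openid', 'email'],
        authorizationRules: [],
    });
    if (!result.isValid) {
        console.warn(result.error); // e.g. URL-format or discovery problems parsed by OidcErrorHelper
    }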
@@ -0,0 +1,485 @@
import { Logger } from '@nestjs/common';
import { ConfigModule, ConfigService } from '@nestjs/config';
import { Test, TestingModule } from '@nestjs/testing';

import * as client from 'openid-client';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { OidcAuthorizationService } from '@app/unraid-api/graph/resolvers/sso/auth/oidc-authorization.service.js';
import { OidcClaimsService } from '@app/unraid-api/graph/resolvers/sso/auth/oidc-claims.service.js';
import { OidcTokenExchangeService } from '@app/unraid-api/graph/resolvers/sso/auth/oidc-token-exchange.service.js';
import { OidcClientConfigService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client-config.service.js';
import { OidcRedirectUriService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-redirect-uri.service.js';
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/oidc-config.service.js';
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
import { OidcService } from '@app/unraid-api/graph/resolvers/sso/core/oidc.service.js';
import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';
import { OidcSessionService } from '@app/unraid-api/graph/resolvers/sso/session/oidc-session.service.js';
import { OidcStateService } from '@app/unraid-api/graph/resolvers/sso/session/oidc-state.service.js';

describe('OidcService Integration Tests - Enhanced Logging', () => {
    let service: OidcService;
    let configPersistence: OidcConfigPersistence;
    let loggerSpy: any;
    let debugLogs: string[] = [];
    let errorLogs: string[] = [];
    let warnLogs: string[] = [];
    let logLogs: string[] = [];

    beforeEach(async () => {
        // Clear log arrays
        debugLogs = [];
        errorLogs = [];
        warnLogs = [];
        logLogs = [];

        const module: TestingModule = await Test.createTestingModule({
            imports: [
                ConfigModule.forRoot({
                    isGlobal: true,
                    load: [() => ({ BASE_URL: 'http://test.local' })],
                }),
            ],
            providers: [
                OidcService,
                OidcValidationService,
                OidcClientConfigService,
                OidcTokenExchangeService,
                {
                    provide: OidcAuthorizationService,
                    useValue: {
                        checkAuthorization: vi.fn(),
                    },
                },
                {
                    provide: OidcConfigPersistence,
                    useValue: {
                        getProvider: vi.fn(),
                        saveProvider: vi.fn(),
                        getConfig: vi.fn().mockReturnValue({
                            providers: [],
                            defaultAllowedOrigins: [],
                        }),
                    },
                },
                {
                    provide: OidcSessionService,
                    useValue: {
                        createSession: vi.fn().mockResolvedValue('mock-token'),
                        validateSession: vi.fn(),
                    },
                },
                {
                    provide: OidcStateService,
                    useValue: {
                        generateSecureState: vi.fn().mockResolvedValue('secure-state'),
                        validateSecureState: vi.fn().mockResolvedValue({
                            isValid: true,
                            clientState: 'test-state',
                            redirectUri: 'https://myapp.example.com/graphql/api/auth/oidc/callback',
                        }),
                        extractProviderFromState: vi.fn().mockReturnValue('test-provider'),
                    },
                },
                {
                    provide: OidcRedirectUriService,
                    useValue: {
                        getRedirectUri: vi
                            .fn()
                            .mockResolvedValue(
                                'https://myapp.example.com/graphql/api/auth/oidc/callback'
                            ),
                    },
                },
                {
                    provide: OidcClaimsService,
                    useValue: {
                        parseIdToken: vi.fn().mockReturnValue({
                            sub: 'user123',
                            email: 'user@example.com',
                        }),
                        validateClaims: vi.fn().mockReturnValue('user123'),
                    },
                },
            ],
        }).compile();

        service = module.get<OidcService>(OidcService);
        configPersistence = module.get<OidcConfigPersistence>(OidcConfigPersistence);

        // Spy on logger methods to capture logs
        loggerSpy = {
            debug: vi
                .spyOn(Logger.prototype, 'debug')
                .mockImplementation((message: string, ...args: any[]) => {
                    debugLogs.push(message);
                }),
            error: vi
                .spyOn(Logger.prototype, 'error')
                .mockImplementation((message: string, ...args: any[]) => {
                    errorLogs.push(message);
                }),
            warn: vi
                .spyOn(Logger.prototype, 'warn')
                .mockImplementation((message: string, ...args: any[]) => {
                    warnLogs.push(message);
                }),
            log: vi
                .spyOn(Logger.prototype, 'log')
                .mockImplementation((message: string, ...args: any[]) => {
                    logLogs.push(message);
                }),
            verbose: vi.spyOn(Logger.prototype, 'verbose').mockImplementation(() => {}),
        };
    });

    afterEach(() => {
        vi.restoreAllMocks();
    });

    describe('Token Exchange Error Logging', () => {
        it('should log detailed error information when token exchange fails with Google (trailing slash issue)', async () => {
            // This simulates the issue from #1616 where a trailing slash causes failure
            const provider: OidcProvider = {
                id: 'google-test',
                name: 'Google Test',
                issuer: 'https://accounts.google.com/', // Trailing slash will cause issue
                clientId: 'test-client-id',
                clientSecret: 'test-client-secret',
                scopes: ['openid', 'email', 'profile'],
                authorizationRules: [
                    {
                        claim: 'email',
                        operator: 'ENDS_WITH' as any,
                        value: ['@example.com'],
                    },
                ],
            };

            vi.mocked(configPersistence.getProvider).mockResolvedValue(provider);

            try {
                await service.handleCallback({
                    providerId: 'google-test',
                    code: 'test-code',
                    state: 'test-state',
                    requestOrigin: 'http://test.local',
                    fullCallbackUrl:
                        'http://test.local/graphql/api/auth/oidc/callback?code=test-code&state=test-state',
                    requestHeaders: { host: 'test.local' },
                });
            } catch (error) {
                // We expect this to fail
            }

            // Verify that the service attempted to handle the callback
            // Note: Detailed token exchange logging now happens in OidcTokenExchangeService
            expect(errorLogs.length).toBeGreaterThan(0);
            // Changed logging format to use error extractor
            expect(errorLogs.some((log) => log.includes('Token exchange failed'))).toBe(true);
        });

        it('should log discovery failure details with invalid issuer URL', async () => {
            const provider: OidcProvider = {
                id: 'invalid-issuer',
                name: 'Invalid Issuer Test',
                issuer: 'https://invalid-oidc-provider.example.com', // Non-existent domain
                clientId: 'test-client-id',
                clientSecret: 'test-client-secret',
                scopes: ['openid', 'email'],
                authorizationRules: [],
            };

            const validationService = new OidcValidationService(new ConfigService());
            const result = await validationService.validateProvider(provider);

            expect(result.isValid).toBe(false);
            // Should now have more specific error message
            expect(result.error).toBeDefined();
            // The error should mention the domain cannot be resolved or connection failed
            expect(result.error).toMatch(
                /Cannot resolve domain name|Failed to connect to OIDC provider/
            );
            expect(result.details).toBeDefined();
            expect(result.details).toHaveProperty('type');
            // Should be either DNS_ERROR or FETCH_ERROR depending on the cause
            expect(['DNS_ERROR', 'FETCH_ERROR']).toContain((result.details as any).type);
        });

        it('should log detailed HTTP error responses from discovery', async () => {
            const provider: OidcProvider = {
                id: 'http-error-test',
                name: 'HTTP Error Test',
                issuer: 'https://httpstat.us/500', // Returns 500 error
                clientId: 'test-client-id',
                clientSecret: 'test-client-secret',
                scopes: ['openid'],
                authorizationRules: [],
            };

            vi.mocked(configPersistence.getProvider).mockResolvedValue(provider);

            try {
                await service.validateProvider(provider);
            } catch (error) {
                // Expected to fail
            }

            // Check that HTTP status details are logged (now in log level)
            expect(logLogs.some((log) => log.includes('Discovery URL:'))).toBe(true);
            expect(logLogs.some((log) => log.includes('Client ID:'))).toBe(true);
        });

        it('should log authorization URL building details', async () => {
            const provider: OidcProvider = {
                id: 'auth-url-test',
                name: 'Auth URL Test',
                issuer: 'https://accounts.google.com',
                clientId: 'test-client-id',
                clientSecret: 'test-client-secret',
                scopes: ['openid', 'email', 'profile'],
                authorizationRules: [],
            };

            vi.mocked(configPersistence.getProvider).mockResolvedValue(provider);

            try {
                await service.getAuthorizationUrl({
                    providerId: 'auth-url-test',
                    state: 'test-state',
                    requestOrigin: 'http://test.local',
                    requestHeaders: { host: 'test.local' },
                });

                // Verify URL building logs
                expect(logLogs.some((log) => log.includes('Built authorization URL'))).toBe(true);
                expect(logLogs.some((log) => log.includes('Authorization parameters:'))).toBe(true);
            } catch (error) {
                // May fail due to real discovery, but we're interested in the logs
            }
        });

        it('should log detailed information for manual endpoint configuration', async () => {
            const provider: OidcProvider = {
                id: 'manual-endpoints',
                name: 'Manual Endpoints Test',
                issuer: undefined,
                authorizationEndpoint: 'https://auth.example.com/authorize',
                tokenEndpoint: 'https://auth.example.com/token',
                clientId: 'test-client-id',
                clientSecret: 'test-client-secret',
                scopes: ['openid'],
                authorizationRules: [],
            };

            vi.mocked(configPersistence.getProvider).mockResolvedValue(provider);

            const authUrl = await service.getAuthorizationUrl({
                providerId: 'manual-endpoints',
                state: 'test-state',
                requestOrigin: 'http://test.local',
                requestHeaders: {
                    'x-forwarded-host': 'test.local',
                    'x-forwarded-proto': 'http',
                },
            });

            // Verify manual endpoint logs
            expect(debugLogs.some((log) => log.includes('Built authorization URL'))).toBe(true);
            expect(debugLogs.some((log) => log.includes('client_id=test-client-id'))).toBe(true);
            expect(authUrl).toContain('https://auth.example.com/authorize');
        });

        it('should log JWT claim validation failures with detailed context', async () => {
            const provider: OidcProvider = {
                id: 'jwt-validation-test',
                name: 'JWT Validation Test',
                issuer: 'https://accounts.google.com',
                clientId: 'test-client-id',
                clientSecret: 'test-client-secret',
                scopes: ['openid', 'email'],
                authorizationRules: [
                    {
                        claim: 'email',
                        operator: 'ENDS_WITH' as any,
                        value: ['@restricted.com'],
                    },
                ],
            };

            vi.mocked(configPersistence.getProvider).mockResolvedValue(provider);

            // Mock a scenario where JWT validation fails
            try {
                await service.handleCallback({
                    providerId: 'jwt-validation-test',
                    code: 'test-code',
                    state: 'test-state',
                    requestOrigin: 'http://test.local',
                    fullCallbackUrl:
                        'http://test.local/graphql/api/auth/oidc/callback?code=test-code&state=test-state',
                    requestHeaders: { host: 'test.local' },
                });
            } catch (error) {
                // Expected to fail
            }

            // The JWT error handling is now in OidcTokenExchangeService
            // We should see some error logged
            expect(errorLogs.length).toBeGreaterThan(0);
        });
    });

    describe('Discovery Endpoint Logging', () => {
        it('should log all discovery metadata when successful', async () => {
            // Use a real OIDC provider that works
            const provider: OidcProvider = {
                id: 'microsoft',
                name: 'Microsoft',
                issuer: 'https://login.microsoftonline.com/common/v2.0',
                clientId: 'test-client-id',
                clientSecret: 'test-client-secret',
                scopes: ['openid', 'email', 'profile'],
                authorizationRules: [],
            };

            const validationService = new OidcValidationService(new ConfigService());

            try {
                await validationService.performDiscovery(provider);
            } catch (error) {
                // May fail due to network, but we're checking logs
            }

            // Verify discovery logging (now in log level)
            expect(logLogs.some((log) => log.includes('Starting discovery'))).toBe(true);
            expect(logLogs.some((log) => log.includes('Discovery URL:'))).toBe(true);
        });

        it('should log discovery failures with malformed JSON response', async () => {
            const provider: OidcProvider = {
                id: 'malformed-json',
                name: 'Malformed JSON Test',
                issuer: 'https://example.com/malformed',
                clientId: 'test-client-id',
                clientSecret: 'test-client-secret',
                scopes: ['openid'],
                authorizationRules: [],
            };

            // Mock global fetch to return HTML instead of JSON
            const originalFetch = global.fetch;
            global.fetch = vi.fn().mockImplementation(() =>
                Promise.resolve(
                    new Response('<html><body>Not JSON</body></html>', {
                        status: 200,
                        headers: { 'content-type': 'text/html' },
                    })
                )
            );

            const validationService = new OidcValidationService(new ConfigService());
            const result = await validationService.validateProvider(provider);

            // Restore original fetch
            global.fetch = originalFetch;

            expect(result.isValid).toBe(false);
            expect(result.error).toBeDefined();
            // The openid-client library will fail when it gets HTML instead of JSON
            // It returns "unexpected response content-type" error
            expect(result.error).toMatch(
                /Invalid OIDC discovery|malformed|doesn't conform|unexpected|content-type/i
            );
        });

        it('should handle and log HTTP vs HTTPS protocol differences', async () => {
            const httpProvider: OidcProvider = {
                id: 'http-local',
                name: 'HTTP Local Test',
                issuer: 'http://localhost:8080', // HTTP endpoint
                clientId: 'test-client-id',
                clientSecret: 'test-client-secret',
                scopes: ['openid'],
                authorizationRules: [],
            };

            // Create a validation service and spy on its logger
            const validationService = new OidcValidationService(new ConfigService());

            try {
                await validationService.validateProvider(httpProvider);
            } catch (error) {
                // Expected to fail if localhost:8080 isn't running
            }

            // The HTTP logging happens in the validation service
            // We should check that HTTP issuers are detected
            expect(httpProvider.issuer).toMatch(/^http:/);
            // Verify that we're testing an HTTP endpoint
            expect(httpProvider.issuer).toBe('http://localhost:8080');
        });
    });

    describe('Request/Response Detail Logging', () => {
        it('should log complete request parameters for token exchange', async () => {
            const provider: OidcProvider = {
                id: 'token-params-test',
                name: 'Token Params Test',
                issuer: 'https://accounts.google.com',
                clientId: 'detailed-client-id',
                clientSecret: 'detailed-client-secret',
                scopes: ['openid', 'email', 'profile', 'offline_access'],
                authorizationRules: [],
            };

            vi.mocked(configPersistence.getProvider).mockResolvedValue(provider);

            try {
                await service.handleCallback({
                    providerId: 'token-params-test',
                    code: 'authorization-code-12345',
                    state: 'state-with-signature',
                    requestOrigin: 'https://myapp.example.com',
                    fullCallbackUrl:
                        'https://myapp.example.com/graphql/api/auth/oidc/callback?code=authorization-code-12345&state=state-with-signature&scope=openid+email+profile',
                    requestHeaders: { host: 'myapp.example.com' },
                });
            } catch (error) {
                // Expected to fail
            }

            // Verify that we attempted the operation
            // Detailed parameter logging is now in OidcTokenExchangeService
            expect(debugLogs.length).toBeGreaterThan(0);
            expect(debugLogs.some((log) => log.includes('Client ID: detailed-client-id'))).toBe(true);
            expect(debugLogs.some((log) => log.includes('Client secret configured: Yes'))).toBe(true);
        });

        it('should capture and log all error properties from openid-client', async () => {
            const provider: OidcProvider = {
                id: 'error-properties-test',
                name: 'Error Properties Test',
                issuer: 'https://expired-cert.badssl.com/', // SSL cert error
                clientId: 'test-client-id',
                clientSecret: 'test-client-secret',
                scopes: ['openid'],
                authorizationRules: [],
            };

            const validationService = new OidcValidationService(new ConfigService());
            const result = await validationService.validateProvider(provider);

            expect(result.isValid).toBe(false);
            expect(result.error).toBeDefined();
            // Should detect SSL/certificate issues or connection failure
            expect(result.error).toMatch(
                /SSL\/TLS certificate error|Failed to connect to OIDC provider|certificate/
            );
            expect(result.details).toBeDefined();
            expect(result.details).toHaveProperty('type');
            // Should be either SSL_ERROR or FETCH_ERROR
            expect(['SSL_ERROR', 'FETCH_ERROR']).toContain((result.details as any).type);
        });
    });
});
381 api/src/unraid-api/graph/resolvers/sso/core/oidc.service.test.ts Normal file
@@ -0,0 +1,381 @@
import { CacheModule } from '@nestjs/cache-manager';
import { UnauthorizedException } from '@nestjs/common';
import { Test, TestingModule } from '@nestjs/testing';

import { beforeEach, describe, expect, it, vi } from 'vitest';

import { OidcAuthorizationService } from '@app/unraid-api/graph/resolvers/sso/auth/oidc-authorization.service.js';
import { OidcClaimsService } from '@app/unraid-api/graph/resolvers/sso/auth/oidc-claims.service.js';
import { OidcTokenExchangeService } from '@app/unraid-api/graph/resolvers/sso/auth/oidc-token-exchange.service.js';
import { OidcClientConfigService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client-config.service.js';
import { OidcRedirectUriService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-redirect-uri.service.js';
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/oidc-config.service.js';
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
import { OidcService } from '@app/unraid-api/graph/resolvers/sso/core/oidc.service.js';
import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';
import { OidcSessionService } from '@app/unraid-api/graph/resolvers/sso/session/oidc-session.service.js';
import { OidcStateService } from '@app/unraid-api/graph/resolvers/sso/session/oidc-state.service.js';

// Mock openid-client
vi.mock('openid-client', () => ({
    buildAuthorizationUrl: vi.fn((config, params) => {
        const url = new URL(config.serverMetadata().authorization_endpoint);
        Object.entries(params).forEach(([key, value]) => {
            if (value !== undefined) {
                url.searchParams.set(key, String(value));
            }
        });
        return url;
    }),
    allowInsecureRequests: vi.fn(),
}));

describe('OidcService Integration', () => {
    let service: OidcService;
    let oidcConfig: any;
    let sessionService: any;
    let stateService: OidcStateService;
    let redirectUriService: any;
    let clientConfigService: any;
    let tokenExchangeService: any;
    let claimsService: any;
    let authorizationService: any;

    beforeEach(async () => {
        const module: TestingModule = await Test.createTestingModule({
            imports: [CacheModule.register()],
            providers: [
                OidcService,
                {
                    provide: OidcConfigPersistence,
                    useValue: {
                        getProvider: vi.fn(),
                        getConfig: vi.fn().mockResolvedValue({
                            providers: [],
                            defaultAllowedOrigins: ['https://example.com'],
                        }),
                    },
                },
                {
                    provide: OidcSessionService,
                    useValue: {
                        createSession: vi.fn().mockResolvedValue('padded-token-123'),
                    },
                },
                OidcStateService,
                {
                    provide: OidcValidationService,
                    useValue: {
                        validateProvider: vi.fn().mockResolvedValue({ isValid: true }),
                        performDiscovery: vi.fn(),
                    },
                },
                {
                    provide: OidcAuthorizationService,
                    useValue: {
                        checkAuthorization: vi.fn(),
                    },
                },
                {
                    provide: OidcRedirectUriService,
                    useValue: {
                        getRedirectUri: vi.fn().mockResolvedValue('https://example.com/callback'),
                    },
                },
                {
                    provide: OidcClientConfigService,
                    useValue: {
                        getOrCreateConfig: vi.fn(),
                        clearCache: vi.fn(),
                    },
                },
                {
                    provide: OidcTokenExchangeService,
                    useValue: {
                        exchangeCodeForTokens: vi.fn(),
                    },
                },
                {
                    provide: OidcClaimsService,
                    useValue: {
                        parseIdToken: vi.fn(),
                        validateClaims: vi.fn(),
                    },
                },
            ],
        }).compile();

        service = module.get<OidcService>(OidcService);
        oidcConfig = module.get(OidcConfigPersistence);
        sessionService = module.get(OidcSessionService);
        stateService = module.get<OidcStateService>(OidcStateService);
        redirectUriService = module.get(OidcRedirectUriService);
        clientConfigService = module.get(OidcClientConfigService);
        tokenExchangeService = module.get(OidcTokenExchangeService);
        claimsService = module.get(OidcClaimsService);
        authorizationService = module.get(OidcAuthorizationService);
    });

    describe('getAuthorizationUrl', () => {
        it('should generate authorization URL with custom endpoints', async () => {
            const provider: OidcProvider = {
                id: 'custom-provider',
                name: 'Custom Provider',
                clientId: 'test-client-id',
                clientSecret: 'test-secret',
                authorizationEndpoint: 'https://custom.example.com/auth',
                scopes: ['openid', 'profile'],
                authorizationRules: [],
            };

            oidcConfig.getProvider.mockResolvedValue(provider);

            const params = {
                providerId: 'custom-provider',
                state: 'client-state-123',
                requestOrigin: 'https://example.com',
                requestHeaders: { host: 'example.com' },
            };

            const url = await service.getAuthorizationUrl(params);

            expect(redirectUriService.getRedirectUri).toHaveBeenCalledWith('https://example.com', {
                host: 'example.com',
            });

            const urlObj = new URL(url);
            expect(urlObj.origin).toBe('https://custom.example.com');
            expect(urlObj.pathname).toBe('/auth');
            expect(urlObj.searchParams.get('client_id')).toBe('test-client-id');
            expect(urlObj.searchParams.get('redirect_uri')).toBe('https://example.com/callback');
            expect(urlObj.searchParams.get('scope')).toBe('openid profile');
            expect(urlObj.searchParams.get('response_type')).toBe('code');
            expect(urlObj.searchParams.has('state')).toBe(true);
        });

        it('should use OIDC discovery when no custom authorization endpoint', async () => {
            const provider: OidcProvider = {
                id: 'discovery-provider',
                name: 'Discovery Provider',
                clientId: 'test-client-id',
                issuer: 'https://discovery.example.com',
                scopes: ['openid'],
                authorizationRules: [],
            };

            // Create a mock configuration object
            const mockConfig = {
                serverMetadata: vi.fn().mockReturnValue({
                    authorization_endpoint: 'https://discovery.example.com/authorize',
                }),
            };

            oidcConfig.getProvider.mockResolvedValue(provider);
            clientConfigService.getOrCreateConfig.mockResolvedValue(mockConfig);

            const params = {
                providerId: 'discovery-provider',
                state: 'client-state-123',
                requestOrigin: 'https://example.com',
                requestHeaders: {},
            };

            const url = await service.getAuthorizationUrl(params);

            expect(clientConfigService.getOrCreateConfig).toHaveBeenCalledWith(provider);
            expect(url).toContain('https://discovery.example.com/authorize');
        });

        it('should throw when provider not found', async () => {
            oidcConfig.getProvider.mockResolvedValue(null);

            const params = {
                providerId: 'non-existent',
                state: 'state',
                requestOrigin: 'https://example.com',
                requestHeaders: {},
            };

            await expect(service.getAuthorizationUrl(params)).rejects.toThrow(UnauthorizedException);
        });
    });

    describe('handleCallback', () => {
        it('should handle successful callback flow', async () => {
            const provider: OidcProvider = {
                id: 'test-provider',
                name: 'Test Provider',
                clientId: 'test-client-id',
                issuer: 'https://test.example.com',
                scopes: ['openid'],
                authorizationRules: [],
            };

            const mockConfig = {
                serverMetadata: vi.fn().mockReturnValue({
                    issuer: 'https://test.example.com',
                    token_endpoint: 'https://test.example.com/token',
                }),
            };

            const mockTokens = {
                id_token: 'id.token.here',
                access_token: 'access.token.here',
            };

            const mockClaims = {
                sub: 'user123',
                email: 'user@example.com',
            };

            oidcConfig.getProvider.mockResolvedValue(provider);
            clientConfigService.getOrCreateConfig.mockResolvedValue(mockConfig);
            tokenExchangeService.exchangeCodeForTokens.mockResolvedValue(mockTokens);
            claimsService.parseIdToken.mockReturnValue(mockClaims);
            claimsService.validateClaims.mockReturnValue('user123');

            // Mock the OidcStateExtractor's static method
            const OidcStateExtractor = await import(
                '@app/unraid-api/graph/resolvers/sso/session/oidc-state-extractor.util.js'
            );
            vi.spyOn(OidcStateExtractor.OidcStateExtractor, 'extractAndValidateState').mockResolvedValue(
                {
                    providerId: 'test-provider',
                    originalState: 'original-state',
                    clientState: 'original-state',
                    redirectUri: 'https://example.com/callback',
                }
            );

            const params = {
                providerId: 'test-provider',
                code: 'auth-code-123',
                state: 'secure-state',
                requestOrigin: 'https://example.com',
                fullCallbackUrl: 'https://example.com/callback?code=auth-code-123&state=secure-state',
                requestHeaders: {},
            };

            const token = await service.handleCallback(params);

            expect(token).toBe('padded-token-123');
            expect(tokenExchangeService.exchangeCodeForTokens).toHaveBeenCalled();
            expect(claimsService.parseIdToken).toHaveBeenCalledWith('id.token.here');
            expect(claimsService.validateClaims).toHaveBeenCalledWith(mockClaims);
            expect(authorizationService.checkAuthorization).toHaveBeenCalledWith(provider, mockClaims);
            expect(sessionService.createSession).toHaveBeenCalledWith('test-provider', 'user123');
        });

        it('should throw when provider not found', async () => {
            oidcConfig.getProvider.mockResolvedValue(null);

            const params = {
                providerId: 'non-existent',
                code: 'code',
                state: 'state',
                requestOrigin: 'https://example.com',
                fullCallbackUrl: 'https://example.com/callback',
                requestHeaders: {},
            };

            await expect(service.handleCallback(params)).rejects.toThrow(UnauthorizedException);
        });

        it('should handle authorization rejection', async () => {
            const provider: OidcProvider = {
                id: 'test-provider',
                name: 'Test Provider',
                clientId: 'test-client-id',
                issuer: 'https://test.example.com',
                scopes: ['openid'],
                authorizationRules: [],
            };

            const mockConfig = {
                serverMetadata: vi.fn().mockReturnValue({
                    issuer: 'https://test.example.com',
                    token_endpoint: 'https://test.example.com/token',
                }),
            };

            const mockTokens = {
                id_token: 'id.token.here',
            };

            const mockClaims = {
                sub: 'user123',
                email: 'user@example.com',
            };

            oidcConfig.getProvider.mockResolvedValue(provider);
            clientConfigService.getOrCreateConfig.mockResolvedValue(mockConfig);
            tokenExchangeService.exchangeCodeForTokens.mockResolvedValue(mockTokens);
            claimsService.parseIdToken.mockReturnValue(mockClaims);
            claimsService.validateClaims.mockReturnValue('user123');
            authorizationService.checkAuthorization.mockRejectedValue(
                new UnauthorizedException('Not authorized')
            );

            // Mock the OidcStateExtractor's static method
            const OidcStateExtractor = await import(
                '@app/unraid-api/graph/resolvers/sso/session/oidc-state-extractor.util.js'
            );
            vi.spyOn(OidcStateExtractor.OidcStateExtractor, 'extractAndValidateState').mockResolvedValue(
                {
                    providerId: 'test-provider',
                    originalState: 'original-state',
                    clientState: 'original-state',
                    redirectUri: 'https://example.com/callback',
                }
            );

            const params = {
                providerId: 'test-provider',
                code: 'auth-code-123',
                state: 'secure-state',
                requestOrigin: 'https://example.com',
                fullCallbackUrl: 'https://example.com/callback',
                requestHeaders: {},
            };

            await expect(service.handleCallback(params)).rejects.toThrow(UnauthorizedException);
        });
    });

    describe('validateProvider', () => {
        it('should clear cache and validate provider', async () => {
            const provider: OidcProvider = {
                id: 'test-provider',
                name: 'Test Provider',
                clientId: 'test-client-id',
                issuer: 'https://test.example.com',
                scopes: ['openid'],
                authorizationRules: [],
            };

            const result = await service.validateProvider(provider);

            expect(clientConfigService.clearCache).toHaveBeenCalledWith('test-provider');
            // The validation service mock already returns { isValid: true }
            expect(result).toEqual({ isValid: true });
        });
    });

    describe('extractProviderFromState', () => {
        it('should extract provider from state', () => {
            const state = 'provider-id:original-state';

            const result = service.extractProviderFromState(state);

            expect(result.providerId).toBeDefined();
            expect(result.originalState).toBeDefined();
        });
    });

    describe('getStateService', () => {
        it('should return state service', () => {
            const result = service.getStateService();
            expect(result).toBe(stateService);
        });
    });
});
243 api/src/unraid-api/graph/resolvers/sso/core/oidc.service.ts Normal file
@@ -0,0 +1,243 @@
import { Injectable, Logger, UnauthorizedException } from '@nestjs/common';

import * as client from 'openid-client';

import { OidcAuthorizationService } from '@app/unraid-api/graph/resolvers/sso/auth/oidc-authorization.service.js';
import { OidcClaimsService } from '@app/unraid-api/graph/resolvers/sso/auth/oidc-claims.service.js';
import { OidcTokenExchangeService } from '@app/unraid-api/graph/resolvers/sso/auth/oidc-token-exchange.service.js';
import { OidcClientConfigService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-client-config.service.js';
import { OidcRedirectUriService } from '@app/unraid-api/graph/resolvers/sso/client/oidc-redirect-uri.service.js';
import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/oidc-config.service.js';
import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';
import { OidcSessionService } from '@app/unraid-api/graph/resolvers/sso/session/oidc-session.service.js';
import { OidcStateExtractor } from '@app/unraid-api/graph/resolvers/sso/session/oidc-state-extractor.util.js';
import { OidcStateService } from '@app/unraid-api/graph/resolvers/sso/session/oidc-state.service.js';
import { ErrorExtractor } from '@app/unraid-api/utils/error-extractor.util.js';

export interface GetAuthorizationUrlParams {
    providerId: string;
    state: string;
    requestOrigin: string;
    requestHeaders: Record<string, string | string[] | undefined>;
}

export interface HandleCallbackParams {
    providerId: string;
    code: string;
    state: string;
    requestOrigin: string;
    fullCallbackUrl: string;
    requestHeaders: Record<string, string | string[] | undefined>;
}

@Injectable()
export class OidcService {
    private readonly logger = new Logger(OidcService.name);

    constructor(
        private readonly oidcConfig: OidcConfigPersistence,
        private readonly sessionService: OidcSessionService,
        private readonly stateService: OidcStateService,
        private readonly validationService: OidcValidationService,
        private readonly authorizationService: OidcAuthorizationService,
        private readonly redirectUriService: OidcRedirectUriService,
        private readonly clientConfigService: OidcClientConfigService,
        private readonly tokenExchangeService: OidcTokenExchangeService,
        private readonly claimsService: OidcClaimsService
    ) {}

    async getAuthorizationUrl(params: GetAuthorizationUrlParams): Promise<string> {
        const { providerId, state, requestOrigin, requestHeaders } = params;

        const provider = await this.oidcConfig.getProvider(providerId);
        if (!provider) {
            throw new UnauthorizedException(`Provider ${providerId} not found`);
        }

        // Use requestOrigin with validation
        const redirectUri = await this.redirectUriService.getRedirectUri(requestOrigin, requestHeaders);

        this.logger.debug(`Using redirect URI for authorization: ${redirectUri}`);
        this.logger.debug(`Request origin was: ${requestOrigin}`);

        // Generate secure state with cryptographic signature, including redirect URI
        const secureState = await this.stateService.generateSecureState(providerId, state, redirectUri);

        // Build authorization URL
        if (provider.authorizationEndpoint) {
            // Use custom authorization endpoint
            const authUrl = new URL(provider.authorizationEndpoint);

            // Standard OAuth2 parameters
            authUrl.searchParams.set('client_id', provider.clientId);
            authUrl.searchParams.set('redirect_uri', redirectUri);
            authUrl.searchParams.set('scope', provider.scopes.join(' '));
            authUrl.searchParams.set('state', secureState);
            authUrl.searchParams.set('response_type', 'code');

            this.logger.debug(`Built authorization URL for provider ${provider.id}`);
            this.logger.debug(
                `Authorization parameters: client_id=${provider.clientId}, redirect_uri=${redirectUri}, scope=${provider.scopes.join(' ')}, response_type=code`
            );

            return authUrl.href;
        }

        // Use OIDC discovery for providers without custom endpoints
        const config = await this.clientConfigService.getOrCreateConfig(provider);
        const parameters: Record<string, string> = {
            redirect_uri: redirectUri,
            scope: provider.scopes.join(' '),
            state: secureState,
            response_type: 'code',
        };

        // For HTTP endpoints, we need to call allowInsecureRequests on the config
        if (provider.issuer) {
            try {
                const serverUrl = new URL(provider.issuer);
                if (serverUrl.protocol === 'http:') {
                    this.logger.debug(`Allowing insecure requests for HTTP endpoint: ${provider.id}`);
                    // allowInsecureRequests is deprecated but still needed for HTTP endpoints
                    client.allowInsecureRequests(config);
                }
            } catch (error) {
                this.logger.warn(`Invalid issuer URL for provider ${provider.id}: ${provider.issuer}`);
                // Continue without special HTTP options
            }
        }

        const authUrl = client.buildAuthorizationUrl(config, parameters);

        this.logger.log(`Built authorization URL via discovery for provider ${provider.id}`);
        this.logger.log(`Authorization parameters: ${JSON.stringify(parameters)}`);

        return authUrl.href;
    }

    extractProviderFromState(state: string): { providerId: string; originalState: string } {
        return OidcStateExtractor.extractProviderFromState(state, this.stateService);
    }

    /**
     * Get the state service for external utilities
     */
    getStateService(): OidcStateService {
        return this.stateService;
    }

    async handleCallback(params: HandleCallbackParams): Promise<string> {
        const { providerId, code, state, fullCallbackUrl } = params;

        const provider = await this.oidcConfig.getProvider(providerId);
        if (!provider) {
            throw new UnauthorizedException(`Provider ${providerId} not found`);
        }

        // Extract and validate state, including the stored redirect URI
        const stateInfo = await OidcStateExtractor.extractAndValidateState(state, this.stateService);
        if (!stateInfo.redirectUri) {
            throw new UnauthorizedException('Missing redirect URI in state');
        }

        // Use the redirect URI that was stored during authorization
        const redirectUri = stateInfo.redirectUri;
        this.logger.debug(`Using stored redirect URI from state: ${redirectUri}`);

        try {
            // Always use openid-client for consistency
            const config = await this.clientConfigService.getOrCreateConfig(provider);

            // Log configuration details
            this.logger.debug(`Provider ${providerId} config loaded`);
            this.logger.debug(`Redirect URI: ${redirectUri}`);

            // Build current URL for token exchange
            // CRITICAL: The URL used here MUST match the redirect_uri that was sent to the authorization endpoint
            // Google expects the exact same redirect_uri during token exchange
            const currentUrl = new URL(redirectUri);
            currentUrl.searchParams.set('code', code);
            currentUrl.searchParams.set('state', state);

            // Copy additional parameters from the actual callback if provided
            if (fullCallbackUrl) {
                const actualUrl = new URL(fullCallbackUrl);
                // Copy over additional params that Google might have added (scope, authuser, prompt, etc)
                // but DO NOT change the base URL or path
                ['scope', 'authuser', 'prompt', 'hd', 'session_state', 'iss'].forEach((param) => {
                    const value = actualUrl.searchParams.get(param);
                    if (value && !currentUrl.searchParams.has(param)) {
                        currentUrl.searchParams.set(param, value);
                    }
                });
            }

            // Google returns iss in the response, openid-client v6 expects it
            // If not present, add it based on the provider's issuer
            if (!currentUrl.searchParams.has('iss') && provider.issuer) {
                currentUrl.searchParams.set('iss', provider.issuer);
            }

            this.logger.debug(`Token exchange URL (matches redirect_uri): ${currentUrl.href}`);

            // State was already validated in extractAndValidateState above, use that result
            // The clientState should be present after successful validation, but handle the edge case
            if (!stateInfo.clientState) {
                this.logger.warn('Client state missing after successful validation');
                throw new UnauthorizedException('Invalid state: missing client state');
            }
            const originalState = stateInfo.clientState;
            this.logger.debug(`Exchanging code for tokens with provider ${providerId}`);
            this.logger.debug(`Client state extracted: ${originalState}`);

            // Use the token exchange service
            const tokens = await this.tokenExchangeService.exchangeCodeForTokens(
                config,
                provider,
                code,
                originalState,
                redirectUri,
                fullCallbackUrl
            );

            // Parse ID token to get user info
            const claims = this.claimsService.parseIdToken(tokens.id_token);
            const userSub = this.claimsService.validateClaims(claims);

            // Check authorization based on rules
            // This will throw a helpful error if misconfigured or unauthorized
            await this.authorizationService.checkAuthorization(provider, claims!);

            // Create session and return padded token
            const paddedToken = await this.sessionService.createSession(providerId, userSub);

            this.logger.log(`Successfully authenticated user ${userSub} via provider ${providerId}`);

            return paddedToken;
        } catch (error) {
            const extracted = ErrorExtractor.extract(error);
            this.logger.error(`OAuth callback error: ${extracted.message}`);
            // Re-throw the original error if it's already an UnauthorizedException
            if (error instanceof UnauthorizedException) {
                throw error;
            }
            // Otherwise throw a generic error
            throw new UnauthorizedException('Authentication failed');
        }
    }

    /**
     * Validate OIDC provider configuration by attempting discovery
     * Returns validation result with helpful error messages for debugging
     */
    async validateProvider(
        provider: OidcProvider
    ): Promise<{ isValid: boolean; error?: string; details?: unknown }> {
        // Clear any cached config for this provider to force fresh validation
        this.clientConfigService.clearCache(provider.id);

        // Delegate to the validation service
        return this.validationService.validateProvider(provider);
    }
}
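A minimal sketch of how a caller might drive this service, using the GetAuthorizationUrlParams and HandleCallbackParams interfaces defined above; the helper function, provider id, and request fields are illustrative placeholders, not part of the diff:

async function oidcLoginFlow(
    oidcService: OidcService,
    headers: Record<string, string | string[] | undefined>,
    origin: string
) {
    // Step 1: build the provider authorization URL and redirect the browser to it.
    const authUrl = await oidcService.getAuthorizationUrl({
        providerId: 'example-provider', // hypothetical provider id
        state: 'client-state-123',
        requestOrigin: origin,
        requestHeaders: headers,
    });

    // Step 2 (on the callback route): exchange the returned code for a padded session token.
    const sessionToken = await oidcService.handleCallback({
        providerId: 'example-provider',
        code: 'auth-code-from-callback',
        state: 'secure-state-from-callback',
        requestOrigin: origin,
        fullCallbackUrl: `${origin}/graphql/api/auth/oidc/callback?code=auth-code-from-callback&state=secure-state-from-callback`,
        requestHeaders: headers,
    });

    return { authUrl, sessionToken };
}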
@@ -0,0 +1,16 @@
import { Field, ObjectType } from '@nestjs/graphql';

import { OidcProvider } from '@app/unraid-api/graph/resolvers/sso/models/oidc-provider.model.js';

@ObjectType()
export class OidcConfiguration {
    @Field(() => [OidcProvider], { description: 'List of configured OIDC providers' })
    providers!: OidcProvider[];

    @Field(() => [String], {
        nullable: true,
        description:
            'Default allowed redirect origins that apply to all OIDC providers (e.g., Tailscale domains)',
    })
    defaultAllowedOrigins?: string[];
}
@@ -80,9 +80,11 @@ export class OidcProvider {
    @Field(() => String, {
        description:
            'OIDC issuer URL (e.g., https://accounts.google.com). Required for auto-discovery via /.well-known/openid-configuration',
        nullable: true,
    })
    @IsUrl()
    issuer!: string;
    @IsOptional()
    issuer?: string;

    @Field(() => String, {
        nullable: true,
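The hunk above relaxes issuer from required to optional, so providers configured purely with manual endpoints can pass validation. A hedged illustration of the two configuration styles (all values are placeholders; Partial<OidcProvider> is used to avoid assuming other required fields):

// Discovery-based provider: issuer drives the /.well-known/openid-configuration lookup.
const discoveryProvider: Partial<OidcProvider> = {
    id: 'example-discovery',
    name: 'Example Discovery Provider',
    issuer: 'https://accounts.google.com',
    clientId: 'example-client-id',
    scopes: ['openid', 'email'],
    authorizationRules: [],
};

// Manual-endpoint provider: no issuer at all, which @IsOptional() now permits.
const manualProvider: Partial<OidcProvider> = {
    id: 'example-manual',
    name: 'Example Manual Provider',
    authorizationEndpoint: 'https://auth.example.com/authorize',
    tokenEndpoint: 'https://auth.example.com/token',
    clientId: 'example-client-id',
    scopes: ['openid'],
    authorizationRules: [],
};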
Some files were not shown because too many files have changed in this diff.