Mirror of https://github.com/unraid/api.git (synced 2026-01-02 14:40:01 -06:00)

Compare commits: 77 commits, 4.9.2-buil ... v4.13.1
| Author | SHA1 | Date |
|---|---|---|
| | 58b5544bea | |
| | a4ff3c4092 | |
| | 1e0a54d9ef | |
| | 096fe98710 | |
| | 57217852a3 | |
| | 979a267bc5 | |
| | 96c120f9b2 | |
| | a2c5d2495f | |
| | b3216874fa | |
| | 27dbfde845 | |
| | 1a25fedd23 | |
| | ad6aa3b674 | |
| | 9c4e764c95 | |
| | 20c2d5b445 | |
| | 85a441b51d | |
| | c9577e9bf2 | |
| | 18b5209087 | |
| | ec8f4f38c8 | |
| | db0e725107 | |
| | 5afca5ecba | |
| | beab83b56e | |
| | 78997a02c6 | |
| | 3534d6fdd7 | |
| | 557b03f882 | |
| | 514a0ef560 | |
| | dfe352dfa1 | |
| | 8005b8c3b6 | |
| | d6fa102d06 | |
| | 52f22678e3 | |
| | 23ef760d76 | |
| | 6ea94f061d | |
| | 782d5ebadc | |
| | dfe363bc37 | |
| | 6b3b951d82 | |
| | 5449e30eed | |
| | dc12656f81 | |
| | f14b74af91 | |
| | e2fa648d1c | |
| | 3b00fec5fd | |
| | 4ff6a1aaa0 | |
| | 86b6c4f85b | |
| | 45bd73698b | |
| | fee7d4613e | |
| | b6acf50c0d | |
| | 8279531f2b | |
| | 0a18b38008 | |
| | 23b2b88461 | |
| | f5352e3a26 | |
| | 9dfdb8dce7 | |
| | 407585cd40 | |
| | 05056e7ca1 | |
| | a74d935b56 | |
| | 2c62e0ad09 | |
| | 1a8da6d92b | |
| | 81808ada0f | |
| | eecd9b1017 | |
| | 441e1805c1 | |
| | 29dcb7d0f0 | |
| | 1a7d35d3f6 | |
| | af33e999a0 | |
| | 85a35804c1 | |
| | a35c8ff2f1 | |
| | 153e7a1e3a | |
| | e73fc356cb | |
| | e1a7a3d22d | |
| | 53b05ebe5e | |
| | 2ed1308e40 | |
| | 6c03df2b97 | |
| | 074370c42c | |
| | f34a33bc9f | |
| | c7801a9236 | |
| | dd759d9f0f | |
| | 74da8d81ef | |
| | 33e0b1ab24 | |
| | ca4e2db1f2 | |
| | ea20d1e211 | |
| | 79c57b8ed0 | |
123  .claude/settings.json  Normal file
@@ -0,0 +1,123 @@
|
||||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"# Development Commands",
|
||||
"Bash(pnpm install)",
|
||||
"Bash(pnpm dev)",
|
||||
"Bash(pnpm build)",
|
||||
"Bash(pnpm test)",
|
||||
"Bash(pnpm test:*)",
|
||||
"Bash(pnpm lint)",
|
||||
"Bash(pnpm lint:fix)",
|
||||
"Bash(pnpm type-check)",
|
||||
"Bash(pnpm codegen)",
|
||||
"Bash(pnpm storybook)",
|
||||
"Bash(pnpm --filter * dev)",
|
||||
"Bash(pnpm --filter * build)",
|
||||
"Bash(pnpm --filter * test)",
|
||||
"Bash(pnpm --filter * lint)",
|
||||
"Bash(pnpm --filter * codegen)",
|
||||
|
||||
"# Git Commands (read-only)",
|
||||
"Bash(git status)",
|
||||
"Bash(git diff)",
|
||||
"Bash(git log)",
|
||||
"Bash(git branch)",
|
||||
"Bash(git remote -v)",
|
||||
|
||||
"# Search Commands",
|
||||
"Bash(rg *)",
|
||||
|
||||
"# File System (read-only)",
|
||||
"Bash(ls)",
|
||||
"Bash(ls -la)",
|
||||
"Bash(pwd)",
|
||||
"Bash(find . -name)",
|
||||
"Bash(find . -type)",
|
||||
|
||||
"# Node/NPM Commands",
|
||||
"Bash(node --version)",
|
||||
"Bash(pnpm --version)",
|
||||
"Bash(npx --version)",
|
||||
|
||||
"# Environment Commands",
|
||||
"Bash(echo $*)",
|
||||
"Bash(which *)",
|
||||
|
||||
"# Process Commands",
|
||||
"Bash(ps aux | grep)",
|
||||
"Bash(lsof -i)",
|
||||
|
||||
"# Documentation Domains",
|
||||
"WebFetch(domain:tailwindcss.com)",
|
||||
"WebFetch(domain:github.com)",
|
||||
"WebFetch(domain:reka-ui.com)",
|
||||
"WebFetch(domain:nodejs.org)",
|
||||
"WebFetch(domain:pnpm.io)",
|
||||
"WebFetch(domain:vitejs.dev)",
|
||||
"WebFetch(domain:nuxt.com)",
|
||||
"WebFetch(domain:nestjs.com)",
|
||||
|
||||
"# IDE Integration",
|
||||
"mcp__ide__getDiagnostics",
|
||||
|
||||
"# Browser MCP (for testing)",
|
||||
"mcp__browsermcp__browser_navigate",
|
||||
"mcp__browsermcp__browser_click",
|
||||
"mcp__browsermcp__browser_screenshot"
|
||||
],
|
||||
"deny": [
|
||||
"# Dangerous Commands",
|
||||
"Bash(rm -rf)",
|
||||
"Bash(chmod 777)",
|
||||
"Bash(curl)",
|
||||
"Bash(wget)",
|
||||
"Bash(ssh)",
|
||||
"Bash(scp)",
|
||||
"Bash(sudo)",
|
||||
"Bash(su)",
|
||||
"Bash(pkill)",
|
||||
"Bash(kill)",
|
||||
"Bash(killall)",
|
||||
"Bash(python)",
|
||||
"Bash(python3)",
|
||||
"Bash(pip)",
|
||||
"Bash(npm)",
|
||||
"Bash(yarn)",
|
||||
"Bash(apt)",
|
||||
"Bash(brew)",
|
||||
"Bash(systemctl)",
|
||||
"Bash(service)",
|
||||
"Bash(docker)",
|
||||
"Bash(docker-compose)",
|
||||
|
||||
"# File Modification (use Edit/Write tools instead)",
|
||||
"Bash(sed)",
|
||||
"Bash(awk)",
|
||||
"Bash(perl)",
|
||||
"Bash(echo > *)",
|
||||
"Bash(echo >> *)",
|
||||
"Bash(cat > *)",
|
||||
"Bash(cat >> *)",
|
||||
"Bash(tee)",
|
||||
|
||||
"# Git Write Commands (require explicit user action)",
|
||||
"Bash(git add)",
|
||||
"Bash(git commit)",
|
||||
"Bash(git push)",
|
||||
"Bash(git pull)",
|
||||
"Bash(git merge)",
|
||||
"Bash(git rebase)",
|
||||
"Bash(git checkout)",
|
||||
"Bash(git reset)",
|
||||
"Bash(git clean)",
|
||||
|
||||
"# Package Management Write Commands",
|
||||
"Bash(pnpm add)",
|
||||
"Bash(pnpm remove)",
|
||||
"Bash(pnpm update)",
|
||||
"Bash(pnpm upgrade)"
|
||||
]
|
||||
},
|
||||
"enableAllProjectMcpServers": false
|
||||
}
|
||||
@@ -1,18 +0,0 @@
{
  "permissions": {
    "allow": [
      "Bash(rg:*)",
      "Bash(find:*)",
      "Bash(pnpm codegen:*)",
      "Bash(pnpm dev:*)",
      "Bash(pnpm build:*)",
      "Bash(pnpm test:*)",
      "Bash(grep:*)",
      "Bash(pnpm type-check:*)",
      "Bash(pnpm lint:*)",
      "Bash(pnpm --filter ./api lint)",
      "Bash(mv:*)"
    ]
  },
  "enableAllProjectMcpServers": false
}
@@ -10,4 +10,5 @@ alwaysApply: false
* Test suite is VITEST, do not use jest
  pnpm --filter ./api test
* Prefer to not mock simple dependencies
* For error testing, use `.rejects.toThrow()` without arguments - don't test exact error message strings unless the message format is specifically what you're testing

@@ -4,6 +4,10 @@ globs: **/*.test.ts,**/__test__/components/**/*.ts,**/__test__/store/**/*.ts,**/
alwaysApply: false
---

## General Testing Best Practices

- **Error Testing:** Use `.rejects.toThrow()` without arguments to test that functions throw errors. Don't test exact error message strings unless the message format is specifically what you're testing
- **Focus on Behavior:** Test what the code does, not implementation details like exact error message wording

## Vue Component Testing Best Practices

- This is a Nuxt.js app, but we are testing with Vitest outside of the Nuxt environment
- Nuxt is currently set to auto-import, so some Vue files may need `computed` or `ref` imported
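The error-testing rule above is easiest to see in a minimal Vitest sketch; the `loadConfig` helper and its error are hypothetical, used only to illustrate the assertion style:

```ts
import { describe, expect, it } from 'vitest';

// Hypothetical helper used only to illustrate the error-testing rule.
async function loadConfig(path: string): Promise<string> {
  if (!path) {
    throw new Error('config path is required');
  }
  return path;
}

describe('loadConfig', () => {
  it('rejects when no path is given', async () => {
    // Assert that the promise rejects without pinning the exact message text.
    await expect(loadConfig('')).rejects.toThrow();
  });
});
```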
20  .github/CODEOWNERS  vendored
@@ -1,20 +0,0 @@
# Default owners for everything in the repo
* @elibosley @pujitm @mdatelle @zackspear

# API specific files
/api/ @elibosley @pujitm @mdatelle

# Web frontend files
/web/ @elibosley @mdatelle @zackspear

# Plugin related files
/plugin/ @elibosley

# Unraid UI specific files
/unraid-ui/ @mdatelle @zackspear @pujitm

# GitHub workflows and configuration
/.github/ @elibosley

# Documentation
*.md @elibosley @pujitm @mdatelle @zackspear
6  .github/workflows/build-plugin.yml  vendored
@@ -88,19 +88,19 @@ jobs:
          pnpm install --frozen-lockfile --filter @unraid/connect-plugin

      - name: Download Unraid UI Components
-       uses: actions/download-artifact@v4
+       uses: actions/download-artifact@v5
        with:
          name: unraid-wc-ui
          path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/uui
          merge-multiple: true
      - name: Download Unraid Web Components
-       uses: actions/download-artifact@v4
+       uses: actions/download-artifact@v5
        with:
          pattern: unraid-wc-rich
          path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/nuxt
          merge-multiple: true
      - name: Download Unraid API
-       uses: actions/download-artifact@v4
+       uses: actions/download-artifact@v5
        with:
          name: unraid-api
          path: ${{ github.workspace }}/plugin/api/
103  .github/workflows/claude-code-review.yml  vendored  Normal file
@@ -0,0 +1,103 @@
|
||||
name: Claude Code Review
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
# Skip reviews for non-code changes
|
||||
paths-ignore:
|
||||
- "**/*.md"
|
||||
- "**/package-lock.json"
|
||||
- "**/pnpm-lock.yaml"
|
||||
- "**/.gitignore"
|
||||
- "**/LICENSE"
|
||||
- "**/*.config.js"
|
||||
- "**/*.config.ts"
|
||||
- "**/tsconfig.json"
|
||||
- "**/.github/workflows/*.yml"
|
||||
- "**/docs/**"
|
||||
|
||||
jobs:
|
||||
claude-review:
|
||||
# Skip review for bot PRs and WIP/skip-review PRs
|
||||
# Only run if changes are significant (>10 lines)
|
||||
if: |
|
||||
(github.event.pull_request.additions > 10 || github.event.pull_request.deletions > 10) &&
|
||||
!contains(github.event.pull_request.title, '[skip-review]') &&
|
||||
!contains(github.event.pull_request.title, '[WIP]') &&
|
||||
!endsWith(github.event.pull_request.user.login, '[bot]') &&
|
||||
github.event.pull_request.user.login != 'dependabot' &&
|
||||
github.event.pull_request.user.login != 'renovate'
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
issues: read
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Run Claude Code Review
|
||||
id: claude-review
|
||||
uses: anthropics/claude-code-action@beta
|
||||
with:
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
|
||||
# Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4)
|
||||
# model: "claude-opus-4-20250514"
|
||||
|
||||
# Direct prompt for automated review (no @claude mention needed)
|
||||
direct_prompt: |
|
||||
IMPORTANT: Review ONLY the DIFF/CHANGESET - the actual lines that were added or modified in this PR.
|
||||
DO NOT review the entire file context, only analyze the specific changes being made.
|
||||
|
||||
Look for HIGH-PRIORITY issues in the CHANGED LINES ONLY:
|
||||
|
||||
1. CRITICAL BUGS: Logic errors, null pointer issues, infinite loops, race conditions
|
||||
2. SECURITY: SQL injection, XSS, authentication bypass, exposed secrets, unsafe operations
|
||||
3. BREAKING CHANGES: API contract violations, removed exports, changed function signatures
|
||||
4. DATA LOSS RISKS: Destructive operations without safeguards, missing data validation
|
||||
|
||||
DO NOT comment on:
|
||||
- Code that wasn't changed in this PR
|
||||
- Style, formatting, or documentation
|
||||
- Test coverage (unless tests are broken by the changes)
|
||||
- Minor optimizations or best practices
|
||||
- Existing code issues that weren't introduced by this PR
|
||||
|
||||
If you find no critical issues in the DIFF, respond with: "✅ No critical issues found in changes"
|
||||
|
||||
Keep response under 10 lines. Reference specific line numbers from the diff when reporting issues.
|
||||
|
||||
# Optional: Use sticky comments to make Claude reuse the same comment on subsequent pushes to the same PR
|
||||
use_sticky_comment: true
|
||||
|
||||
# Context-aware review based on PR characteristics
|
||||
# Uncomment to enable different review strategies based on context
|
||||
# direct_prompt: |
|
||||
# ${{
|
||||
# (github.event.pull_request.additions > 500) &&
|
||||
# 'Large PR detected. Focus only on architectural issues and breaking changes. Skip minor issues.' ||
|
||||
# contains(github.event.pull_request.title, 'fix') &&
|
||||
# 'Bug fix PR: Verify the fix addresses the root cause and check for regression risks.' ||
|
||||
# contains(github.event.pull_request.title, 'deps') &&
|
||||
# 'Dependency update: Check for breaking changes and security advisories only.' ||
|
||||
# contains(github.event.pull_request.title, 'refactor') &&
|
||||
# 'Refactor PR: Verify no behavior changes and check for performance regressions.' ||
|
||||
# contains(github.event.pull_request.title, 'feat') &&
|
||||
# 'New feature: Check for security issues, edge cases, and integration problems only.' ||
|
||||
# 'Standard review: Check for critical bugs, security issues, and breaking changes only.'
|
||||
# }}
|
||||
|
||||
# Optional: Add specific tools for running tests or linting
|
||||
# allowed_tools: "Bash(npm run test),Bash(npm run lint),Bash(npm run typecheck)"
|
||||
|
||||
# Optional: Skip review for certain conditions
|
||||
# if: |
|
||||
# !contains(github.event.pull_request.title, '[skip-review]') &&
|
||||
# !contains(github.event.pull_request.title, '[WIP]')
|
||||
|
||||
64  .github/workflows/claude.yml  vendored  Normal file
@@ -0,0 +1,64 @@
|
||||
name: Claude Code
|
||||
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
pull_request_review_comment:
|
||||
types: [created]
|
||||
issues:
|
||||
types: [opened, assigned]
|
||||
pull_request_review:
|
||||
types: [submitted]
|
||||
|
||||
jobs:
|
||||
claude:
|
||||
if: |
|
||||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
|
||||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
issues: read
|
||||
id-token: write
|
||||
actions: read # Required for Claude to read CI results on PRs
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Run Claude Code
|
||||
id: claude
|
||||
uses: anthropics/claude-code-action@beta
|
||||
with:
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
|
||||
# This is an optional setting that allows Claude to read CI results on PRs
|
||||
additional_permissions: |
|
||||
actions: read
|
||||
|
||||
# Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4)
|
||||
# model: "claude-opus-4-20250514"
|
||||
|
||||
# Optional: Customize the trigger phrase (default: @claude)
|
||||
# trigger_phrase: "/claude"
|
||||
|
||||
# Optional: Trigger when specific user is assigned to an issue
|
||||
# assignee_trigger: "claude-bot"
|
||||
|
||||
# Optional: Allow Claude to run specific commands
|
||||
# allowed_tools: "Bash(npm install),Bash(npm run build),Bash(npm run test:*),Bash(npm run lint:*)"
|
||||
|
||||
# Optional: Add custom instructions for Claude to customize its behavior for your project
|
||||
# custom_instructions: |
|
||||
# Follow our coding standards
|
||||
# Ensure all new code has tests
|
||||
# Use TypeScript for new files
|
||||
|
||||
# Optional: Custom environment variables for Claude
|
||||
# claude_env: |
|
||||
# NODE_ENV: test
|
||||
|
||||
19  .github/workflows/create-docusaurus-pr.yml  vendored
@@ -37,9 +37,26 @@ jobs:
            echo "Source directory does not exist!"
            exit 1
          fi
          # Remove old API docs but preserve other folders
          rm -rf docs-repo/docs/API/
          mkdir -p docs-repo/docs/API

          # Copy all markdown files and maintain directory structure
          cp -r source-repo/api/docs/public/. docs-repo/docs/API/

          # Clean and copy images directory specifically
          rm -rf docs-repo/docs/API/images/
          mkdir -p docs-repo/docs/API/images

          # Copy images from public/images if they exist
          if [ -d "source-repo/api/docs/public/images" ]; then
            cp -r source-repo/api/docs/public/images/. docs-repo/docs/API/images/
          fi

          # Also copy any images from the parent docs/images directory
          if [ -d "source-repo/api/docs/images" ]; then
            cp -r source-repo/api/docs/images/. docs-repo/docs/API/images/
          fi
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v7
        with:
@@ -53,7 +70,7 @@ jobs:
            Changes were automatically generated from api/docs/* directory.

            @coderabbitai ignore
-         reviewers: ljm42, elibosley, pujitm, mdatelle
+         reviewers: ljm42, elibosley
          branch: update-api-docs
          base: main
          delete-branch: true
4  .github/workflows/deploy-storybook.yml  vendored
@@ -25,7 +25,7 @@ jobs:
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
-         node-version: '20'
+         node-version: '22.18.0'

      - uses: pnpm/action-setup@v4
        name: Install pnpm
@@ -33,7 +33,7 @@ jobs:
          run_install: false

      - name: Cache APT Packages
-       uses: awalsh128/cache-apt-pkgs-action@v1.4.3
+       uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
          version: 1.0
6  .github/workflows/main.yml  vendored
@@ -45,7 +45,7 @@ jobs:
          node-version-file: ".nvmrc"

      - name: Cache APT Packages
-       uses: awalsh128/cache-apt-pkgs-action@v1.4.3
+       uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
          version: 1.0
@@ -190,7 +190,7 @@ jobs:
            ${{ runner.os }}-pnpm-store-

      - name: Cache APT Packages
-       uses: awalsh128/cache-apt-pkgs-action@v1.4.3
+       uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0
@@ -267,7 +267,7 @@ jobs:
            ${{ runner.os }}-pnpm-store-

      - name: Cache APT Packages
-       uses: awalsh128/cache-apt-pkgs-action@v1.4.3
+       uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0
2  .github/workflows/release-production.yml  vendored
@@ -30,7 +30,7 @@ jobs:
          prerelease: false
      - uses: actions/setup-node@v4
        with:
-         node-version: '22.17.0'
+         node-version: '22.18.0'
      - run: |
          cat << 'EOF' > release-notes.txt
          ${{ steps.release-info.outputs.body }}
6  .github/workflows/test-libvirt.yml  vendored
@@ -28,10 +28,10 @@ jobs:

      - uses: actions/setup-python@v5
        with:
-         python-version: "3.13.5"
+         python-version: "3.13.6"

      - name: Cache APT Packages
-       uses: awalsh128/cache-apt-pkgs-action@v1.4.3
+       uses: awalsh128/cache-apt-pkgs-action@v1.5.3
        with:
          packages: libvirt-dev
          version: 1.0
@@ -44,7 +44,7 @@ jobs:
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
-         version: 10
+         version: 10.14.0
          run_install: false

      - name: Get pnpm store directory
9  .gitignore  vendored
@@ -76,6 +76,9 @@ typescript
# Github actions
RELEASE_NOTES.md

# Test backups
api/dev/configs/api.json.backup

# Docker Deploy Folder
deploy/*
!deploy/.gitkeep
@@ -109,3 +112,9 @@ plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/dat

# Config file that changes between versions
api/dev/Unraid.net/myservers.cfg

# Claude local settings
.claude/settings.local.json

# local Mise settings
.mise.toml

@@ -1 +1 @@
-{".":"4.9.2"}
+{".":"4.13.1"}
14  .vscode/settings.json  vendored
@@ -1,14 +0,0 @@
{
  "files.associations": {
    "*.page": "php"
  },
  "editor.codeActionsOnSave": {
    "source.fixAll": "never",
    "source.fixAll.eslint": "explicit"
  },
  "i18n-ally.localesPaths": ["locales"],
  "i18n-ally.keystyle": "flat",
  "eslint.experimental.useFlatConfig": true,
  "typescript.preferences.importModuleSpecifier": "non-relative",
  "javascript.preferences.importModuleSpecifier": "non-relative"
}
22  .vscode/sftp-template.json  vendored
@@ -1,22 +0,0 @@
{
  "_comment": "rename this file to .vscode/sftp.json and replace name/host/privateKeyPath for your system",
  "name": "Tower",
  "host": "Tower.local",
  "protocol": "sftp",
  "port": 22,
  "username": "root",
  "privateKeyPath": "C:/Users/username/.ssh/tower",
  "remotePath": "/",
  "context": "plugin/source/dynamix.unraid.net/",
  "uploadOnSave": true,
  "useTempFile": false,
  "openSsh": false,
  "ignore": [
    "// comment: ignore dot files/dirs in root of repo",
    ".github",
    ".vscode",
    ".git",
    ".DS_Store"
  ]
}
81  @tailwind-shared/base-utilities.css  Normal file
@@ -0,0 +1,81 @@
|
||||
@custom-variant dark (&:where(.dark, .dark *));
|
||||
|
||||
@layer utilities {
|
||||
:host {
|
||||
--tw-divide-y-reverse: 0;
|
||||
--tw-border-style: solid;
|
||||
--tw-font-weight: initial;
|
||||
--tw-tracking: initial;
|
||||
--tw-translate-x: 0;
|
||||
--tw-translate-y: 0;
|
||||
--tw-translate-z: 0;
|
||||
--tw-rotate-x: rotateX(0);
|
||||
--tw-rotate-y: rotateY(0);
|
||||
--tw-rotate-z: rotateZ(0);
|
||||
--tw-skew-x: skewX(0);
|
||||
--tw-skew-y: skewY(0);
|
||||
--tw-space-x-reverse: 0;
|
||||
--tw-gradient-position: initial;
|
||||
--tw-gradient-from: #0000;
|
||||
--tw-gradient-via: #0000;
|
||||
--tw-gradient-to: #0000;
|
||||
--tw-gradient-stops: initial;
|
||||
--tw-gradient-via-stops: initial;
|
||||
--tw-gradient-from-position: 0%;
|
||||
--tw-gradient-via-position: 50%;
|
||||
--tw-gradient-to-position: 100%;
|
||||
--tw-shadow: 0 0 #0000;
|
||||
--tw-shadow-color: initial;
|
||||
--tw-inset-shadow: 0 0 #0000;
|
||||
--tw-inset-shadow-color: initial;
|
||||
--tw-ring-color: initial;
|
||||
--tw-ring-shadow: 0 0 #0000;
|
||||
--tw-inset-ring-color: initial;
|
||||
--tw-inset-ring-shadow: 0 0 #0000;
|
||||
--tw-ring-inset: initial;
|
||||
--tw-ring-offset-width: 0px;
|
||||
--tw-ring-offset-color: #fff;
|
||||
--tw-ring-offset-shadow: 0 0 #0000;
|
||||
--tw-blur: initial;
|
||||
--tw-brightness: initial;
|
||||
--tw-contrast: initial;
|
||||
--tw-grayscale: initial;
|
||||
--tw-hue-rotate: initial;
|
||||
--tw-invert: initial;
|
||||
--tw-opacity: initial;
|
||||
--tw-saturate: initial;
|
||||
--tw-sepia: initial;
|
||||
--tw-drop-shadow: initial;
|
||||
--tw-duration: initial;
|
||||
--tw-ease: initial;
|
||||
}
|
||||
}
|
||||
|
||||
@layer base {
|
||||
*,
|
||||
::after,
|
||||
::before,
|
||||
::backdrop,
|
||||
::file-selector-button {
|
||||
border-color: hsl(var(--border));
|
||||
}
|
||||
|
||||
|
||||
|
||||
body {
|
||||
--color-alpha: #1c1b1b;
|
||||
--color-beta: #f2f2f2;
|
||||
--color-gamma: #999999;
|
||||
--color-gamma-opaque: rgba(153, 153, 153, 0.5);
|
||||
--color-customgradient-start: rgba(242, 242, 242, 0);
|
||||
--color-customgradient-end: rgba(242, 242, 242, 0.85);
|
||||
--shadow-beta: 0 25px 50px -12px rgba(242, 242, 242, 0.15);
|
||||
--ring-offset-shadow: 0 0 var(--color-beta);
|
||||
--ring-shadow: 0 0 var(--color-beta);
|
||||
}
|
||||
|
||||
button:not(:disabled),
|
||||
[role='button']:not(:disabled) {
|
||||
cursor: pointer;
|
||||
}
|
||||
}
|
||||
130  @tailwind-shared/css-variables.css  Normal file
@@ -0,0 +1,130 @@
|
||||
/* Hybrid theme system: Native CSS + Theme Store fallback */
|
||||
@layer base {
|
||||
/* Light mode defaults */
|
||||
:root {
|
||||
--background: 0 0% 100%;
|
||||
--foreground: 0 0% 3.9%;
|
||||
--muted: 0 0% 96.1%;
|
||||
--muted-foreground: 0 0% 45.1%;
|
||||
--popover: 0 0% 100%;
|
||||
--popover-foreground: 0 0% 3.9%;
|
||||
--card: 0 0% 100%;
|
||||
--card-foreground: 0 0% 3.9%;
|
||||
--border: 0 0% 89.8%;
|
||||
--input: 0 0% 89.8%;
|
||||
--primary: 0 0% 9%;
|
||||
--primary-foreground: 0 0% 98%;
|
||||
--secondary: 0 0% 96.1%;
|
||||
--secondary-foreground: 0 0% 9%;
|
||||
--accent: 0 0% 96.1%;
|
||||
--accent-foreground: 0 0% 9%;
|
||||
--destructive: 0 84.2% 60.2%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--ring: 0 0% 3.9%;
|
||||
--chart-1: 12 76% 61%;
|
||||
--chart-2: 173 58% 39%;
|
||||
--chart-3: 197 37% 24%;
|
||||
--chart-4: 43 74% 66%;
|
||||
--chart-5: 27 87% 67%;
|
||||
}
|
||||
|
||||
/* Dark mode */
|
||||
.dark {
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--muted: 0 0% 14.9%;
|
||||
--muted-foreground: 0 0% 63.9%;
|
||||
--popover: 0 0% 3.9%;
|
||||
--popover-foreground: 0 0% 98%;
|
||||
--card: 0 0% 3.9%;
|
||||
--card-foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
--input: 0 0% 14.9%;
|
||||
--primary: 0 0% 98%;
|
||||
--primary-foreground: 0 0% 9%;
|
||||
--secondary: 0 0% 14.9%;
|
||||
--secondary-foreground: 0 0% 98%;
|
||||
--accent: 0 0% 14.9%;
|
||||
--accent-foreground: 0 0% 98%;
|
||||
--destructive: 0 62.8% 30.6%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--ring: 0 0% 83.1%;
|
||||
--chart-1: 220 70% 50%;
|
||||
--chart-2: 160 60% 45%;
|
||||
--chart-3: 30 80% 55%;
|
||||
--chart-4: 280 65% 60%;
|
||||
--chart-5: 340 75% 55%;
|
||||
}
|
||||
|
||||
/* Alternative class-based dark mode support for specific Unraid themes */
|
||||
.dark[data-theme='black'],
|
||||
.dark[data-theme='gray'] {
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
}
|
||||
|
||||
/* For web components: inherit CSS variables from the host */
|
||||
:host {
|
||||
--background: inherit;
|
||||
--foreground: inherit;
|
||||
--muted: inherit;
|
||||
--muted-foreground: inherit;
|
||||
--popover: inherit;
|
||||
--popover-foreground: inherit;
|
||||
--card: inherit;
|
||||
--card-foreground: inherit;
|
||||
--border: inherit;
|
||||
--input: inherit;
|
||||
--primary: inherit;
|
||||
--primary-foreground: inherit;
|
||||
--secondary: inherit;
|
||||
--secondary-foreground: inherit;
|
||||
--accent: inherit;
|
||||
--accent-foreground: inherit;
|
||||
--destructive: inherit;
|
||||
--destructive-foreground: inherit;
|
||||
--ring: inherit;
|
||||
--chart-1: inherit;
|
||||
--chart-2: inherit;
|
||||
--chart-3: inherit;
|
||||
--chart-4: inherit;
|
||||
--chart-5: inherit;
|
||||
}
|
||||
|
||||
/* Class-based dark mode support for web components using :host-context */
|
||||
:host-context(.dark) {
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--muted: 0 0% 14.9%;
|
||||
--muted-foreground: 0 0% 63.9%;
|
||||
--popover: 0 0% 3.9%;
|
||||
--popover-foreground: 0 0% 98%;
|
||||
--card: 0 0% 3.9%;
|
||||
--card-foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
--input: 0 0% 14.9%;
|
||||
--primary: 0 0% 98%;
|
||||
--primary-foreground: 0 0% 9%;
|
||||
--secondary: 0 0% 14.9%;
|
||||
--secondary-foreground: 0 0% 98%;
|
||||
--accent: 0 0% 14.9%;
|
||||
--accent-foreground: 0 0% 98%;
|
||||
--destructive: 0 62.8% 30.6%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--ring: 0 0% 83.1%;
|
||||
--chart-1: 220 70% 50%;
|
||||
--chart-2: 160 60% 45%;
|
||||
--chart-3: 30 80% 55%;
|
||||
--chart-4: 280 65% 60%;
|
||||
--chart-5: 340 75% 55%;
|
||||
}
|
||||
|
||||
/* Alternative class-based dark mode support for specific Unraid themes */
|
||||
:host-context(.dark[data-theme='black']),
|
||||
:host-context(.dark[data-theme='gray']) {
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
}
|
||||
}
|
||||
5  @tailwind-shared/index.css  Normal file
@@ -0,0 +1,5 @@
/* Tailwind Shared Styles - Single entry point for all shared CSS */
@import './css-variables.css';
@import './unraid-theme.css';
@import './base-utilities.css';
@import './sonner.css';
@@ -662,4 +662,4 @@
  .sonner-loader[data-visible='false'] {
    opacity: 0;
    transform: scale(0.8) translate(-50%, -50%);
  }
}
259  @tailwind-shared/unraid-theme.css  Normal file
@@ -0,0 +1,259 @@
|
||||
@theme static {
|
||||
/* Breakpoints */
|
||||
--breakpoint-xs: 30rem;
|
||||
--breakpoint-2xl: 100rem;
|
||||
--breakpoint-3xl: 120rem;
|
||||
/* Container settings */
|
||||
--container-center: true;
|
||||
--container-padding: 2rem;
|
||||
--container-screen-2xl: 1400px;
|
||||
|
||||
/* Font families */
|
||||
--font-sans:
|
||||
clear-sans, ui-sans-serif, system-ui, sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji',
|
||||
'Segoe UI Symbol', 'Noto Color Emoji';
|
||||
|
||||
/* Grid template columns */
|
||||
--grid-template-columns-settings: 35% 1fr;
|
||||
|
||||
/* Border color default */
|
||||
--default-border-color: var(--color-border);
|
||||
--ui-border-muted: hsl(var(--border));
|
||||
--ui-radius: 0.5rem;
|
||||
--ui-primary: var(--color-primary-500);
|
||||
--ui-primary-hover: var(--color-primary-600);
|
||||
--ui-primary-active: var(--color-primary-700);
|
||||
|
||||
/* Color palette */
|
||||
--color-inherit: inherit;
|
||||
--color-transparent: transparent;
|
||||
--color-black: #1c1b1b;
|
||||
--color-grey-darkest: #222;
|
||||
--color-grey-darker: #606f7b;
|
||||
--color-grey-dark: #383735;
|
||||
--color-grey-mid: #999999;
|
||||
--color-grey: #e0e0e0;
|
||||
--color-grey-light: #dae1e7;
|
||||
--color-grey-lighter: #f1f5f8;
|
||||
--color-grey-lightest: #f2f2f2;
|
||||
--color-white: #ffffff;
|
||||
|
||||
/* Unraid colors */
|
||||
--color-yellow-accent: #e9bf41;
|
||||
--color-orange-dark: #f15a2c;
|
||||
--color-orange: #ff8c2f;
|
||||
|
||||
/* Unraid red palette */
|
||||
--color-unraid-red: #e22828;
|
||||
--color-unraid-red-50: #fef2f2;
|
||||
--color-unraid-red-100: #ffe1e1;
|
||||
--color-unraid-red-200: #ffc9c9;
|
||||
--color-unraid-red-300: #fea3a3;
|
||||
--color-unraid-red-400: #fc6d6d;
|
||||
--color-unraid-red-500: #f43f3f;
|
||||
--color-unraid-red-600: #e22828;
|
||||
--color-unraid-red-700: #bd1818;
|
||||
--color-unraid-red-800: #9c1818;
|
||||
--color-unraid-red-900: #821a1a;
|
||||
--color-unraid-red-950: #470808;
|
||||
|
||||
/* Unraid green palette */
|
||||
--color-unraid-green: #63a659;
|
||||
--color-unraid-green-50: #f5f9f4;
|
||||
--color-unraid-green-100: #e7f3e5;
|
||||
--color-unraid-green-200: #d0e6cc;
|
||||
--color-unraid-green-300: #aad1a4;
|
||||
--color-unraid-green-400: #7db474;
|
||||
--color-unraid-green-500: #63a659;
|
||||
--color-unraid-green-600: #457b3e;
|
||||
--color-unraid-green-700: #396134;
|
||||
--color-unraid-green-800: #314e2d;
|
||||
--color-unraid-green-900: #284126;
|
||||
--color-unraid-green-950: #122211;
|
||||
|
||||
/* Primary colors (orange) */
|
||||
--color-primary-50: #fff7ed;
|
||||
--color-primary-100: #ffedd5;
|
||||
--color-primary-200: #fed7aa;
|
||||
--color-primary-300: #fdba74;
|
||||
--color-primary-400: #fb923c;
|
||||
--color-primary-500: #ff6600;
|
||||
--color-primary-600: #ea580c;
|
||||
--color-primary-700: #c2410c;
|
||||
--color-primary-800: #9a3412;
|
||||
--color-primary-900: #7c2d12;
|
||||
--color-primary-950: #431407;
|
||||
|
||||
/* Header colors */
|
||||
--color-header-text-primary: var(--header-text-primary);
|
||||
--color-header-text-secondary: var(--header-text-secondary);
|
||||
--color-header-background-color: var(--header-background-color);
|
||||
|
||||
/* Legacy colors */
|
||||
--color-alpha: var(--color-alpha);
|
||||
--color-beta: var(--color-beta);
|
||||
--color-gamma: var(--color-gamma);
|
||||
--color-gamma-opaque: var(--color-gamma-opaque);
|
||||
--color-customgradient-start: var(--color-customgradient-start);
|
||||
--color-customgradient-end: var(--color-customgradient-end);
|
||||
|
||||
/* Gradients */
|
||||
--color-header-gradient-start: var(--header-gradient-start);
|
||||
--color-header-gradient-end: var(--header-gradient-end);
|
||||
--color-banner-gradient: var(--banner-gradient);
|
||||
|
||||
/* Font sizes */
|
||||
--font-10px: 10px;
|
||||
--font-12px: 12px;
|
||||
--font-14px: 14px;
|
||||
--font-16px: 16px;
|
||||
--font-18px: 18px;
|
||||
--font-20px: 20px;
|
||||
--font-24px: 24px;
|
||||
--font-30px: 30px;
|
||||
|
||||
/* Spacing */
|
||||
--spacing-4_5: 1.125rem;
|
||||
--spacing--8px: -8px;
|
||||
--spacing-2px: 2px;
|
||||
--spacing-4px: 4px;
|
||||
--spacing-6px: 6px;
|
||||
--spacing-8px: 8px;
|
||||
--spacing-10px: 10px;
|
||||
--spacing-12px: 12px;
|
||||
--spacing-14px: 14px;
|
||||
--spacing-16px: 16px;
|
||||
--spacing-20px: 20px;
|
||||
--spacing-24px: 24px;
|
||||
--spacing-28px: 28px;
|
||||
--spacing-32px: 32px;
|
||||
--spacing-36px: 36px;
|
||||
--spacing-40px: 40px;
|
||||
--spacing-64px: 64px;
|
||||
--spacing-80px: 80px;
|
||||
--spacing-90px: 90px;
|
||||
--spacing-150px: 150px;
|
||||
--spacing-160px: 160px;
|
||||
--spacing-200px: 200px;
|
||||
--spacing-260px: 260px;
|
||||
--spacing-300px: 300px;
|
||||
--spacing-310px: 310px;
|
||||
--spacing-350px: 350px;
|
||||
--spacing-448px: 448px;
|
||||
--spacing-512px: 512px;
|
||||
--spacing-640px: 640px;
|
||||
--spacing-800px: 800px;
|
||||
|
||||
/* Width and Height values */
|
||||
--width-36px: 36px;
|
||||
--height-36px: 36px;
|
||||
|
||||
/* Min/Max widths */
|
||||
--min-width-86px: 86px;
|
||||
--min-width-160px: 160px;
|
||||
--min-width-260px: 260px;
|
||||
--min-width-300px: 300px;
|
||||
--min-width-310px: 310px;
|
||||
--min-width-350px: 350px;
|
||||
--min-width-800px: 800px;
|
||||
|
||||
--max-width-86px: 86px;
|
||||
--max-width-160px: 160px;
|
||||
--max-width-260px: 260px;
|
||||
--max-width-300px: 300px;
|
||||
--max-width-310px: 310px;
|
||||
--max-width-350px: 350px;
|
||||
--max-width-640px: 640px;
|
||||
--max-width-800px: 800px;
|
||||
--max-width-1024px: 1024px;
|
||||
|
||||
/* Animations */
|
||||
--animate-mark-2: mark-2 1.5s ease infinite;
|
||||
--animate-mark-3: mark-3 1.5s ease infinite;
|
||||
--animate-mark-6: mark-6 1.5s ease infinite;
|
||||
--animate-mark-7: mark-7 1.5s ease infinite;
|
||||
|
||||
/* Radius */
|
||||
--radius: 0.5rem;
|
||||
|
||||
/* Text Resizing */
|
||||
--text-xs: 1.2rem; /* 12px at 10px base */
|
||||
--text-sm: 1.4rem; /* 14px at 10px base */
|
||||
--text-base: 1.6rem; /* 16px at 10px base */
|
||||
--text-lg: 1.8rem; /* 18px at 10px base */
|
||||
--text-xl: 2rem; /* 20px at 10px base */
|
||||
--text-2xl: 2.4rem; /* 24px at 10px base */
|
||||
--text-3xl: 3rem; /* 30px at 10px base */
|
||||
--text-4xl: 3.6rem; /* 36px at 10px base */
|
||||
--text-5xl: 4.8rem; /* 48px at 10px base */
|
||||
--text-6xl: 6rem; /* 60px at 10px base */
|
||||
--text-7xl: 7.2rem; /* 72px at 10px base */
|
||||
--text-8xl: 9.6rem; /* 96px at 10px base */
|
||||
--text-9xl: 12.8rem; /* 128px at 10px base */
|
||||
--spacing: 0.4rem; /* 4px at 10px base */
|
||||
}
|
||||
|
||||
/* Keyframes */
|
||||
@keyframes mark-2 {
|
||||
50% {
|
||||
transform: translateY(-40px);
|
||||
}
|
||||
to {
|
||||
transform: translateY(0);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes mark-3 {
|
||||
50% {
|
||||
transform: translateY(-62px);
|
||||
}
|
||||
to {
|
||||
transform: translateY(0);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes mark-6 {
|
||||
50% {
|
||||
transform: translateY(40px);
|
||||
}
|
||||
to {
|
||||
transform: translateY(0);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes mark-7 {
|
||||
50% {
|
||||
transform: translateY(62px);
|
||||
}
|
||||
to {
|
||||
transform: translateY(0);
|
||||
}
|
||||
}
|
||||
|
||||
/* Theme colors that reference CSS variables */
|
||||
@theme inline {
|
||||
--color-background: hsl(var(--background));
|
||||
--color-foreground: hsl(var(--foreground));
|
||||
--color-muted: hsl(var(--muted));
|
||||
--color-muted-foreground: hsl(var(--muted-foreground));
|
||||
--color-popover: hsl(var(--popover));
|
||||
--color-popover-foreground: hsl(var(--popover-foreground));
|
||||
--color-card: hsl(var(--card));
|
||||
--color-card-foreground: hsl(var(--card-foreground));
|
||||
--color-border: hsl(var(--border));
|
||||
--color-input: hsl(var(--input));
|
||||
--color-primary: hsl(var(--primary));
|
||||
--color-primary-foreground: hsl(var(--primary-foreground));
|
||||
--color-secondary: hsl(var(--secondary));
|
||||
--color-secondary-foreground: hsl(var(--secondary-foreground));
|
||||
--color-accent: hsl(var(--accent));
|
||||
--color-accent-foreground: hsl(var(--accent-foreground));
|
||||
--color-destructive: hsl(var(--destructive));
|
||||
--color-destructive-foreground: hsl(var(--destructive-foreground));
|
||||
--color-ring: hsl(var(--ring));
|
||||
--color-chart-1: hsl(var(--chart-1, 12 76% 61%));
|
||||
--color-chart-2: hsl(var(--chart-2, 173 58% 39%));
|
||||
--color-chart-3: hsl(var(--chart-3, 197 37% 24%));
|
||||
--color-chart-4: hsl(var(--chart-4, 43 74% 66%));
|
||||
--color-chart-5: hsl(var(--chart-5, 27 87% 67%));
|
||||
}
|
||||
22  CLAUDE.md
@@ -46,6 +46,16 @@ cd api && pnpm codegen # Generate GraphQL types
pnpm unraid:deploy <SERVER_IP> # Deploy all to Unraid server
```

### Developer Tools

```bash
unraid-api developer                  # Interactive prompt for tools
unraid-api developer --sandbox true   # Enable GraphQL sandbox
unraid-api developer --sandbox false  # Disable GraphQL sandbox
unraid-api developer --enable-modal   # Enable modal testing tool
unraid-api developer --disable-modal  # Disable modal testing tool
```

## Architecture Notes

### API Structure (NestJS)
@@ -110,6 +120,13 @@ Enables GraphQL playground at `http://tower.local/graphql`

### Testing Guidelines

#### General Testing Best Practices

- **Error Testing:** Use `.rejects.toThrow()` without arguments to test that functions throw errors. Don't test exact error message strings unless the message format is specifically what you're testing
- **Focus on Behavior:** Test what the code does, not implementation details like exact error message wording
- **Avoid Brittleness:** Don't write tests that break when minor changes are made to error messages, log formats, or other non-essential details
- **Use Mocks Correctly:** Mocks should be used as nouns, not verbs.

#### Vue Component Testing

- This is a Nuxt.js app, but we are testing with Vitest outside of the Nuxt environment
@@ -135,3 +152,8 @@ Enables GraphQL playground at `http://tower.local/graphql`
- Place all mock declarations at the top level
- Use factory functions for module mocks to avoid hoisting issues
- Clear mocks between tests to ensure isolation

## Development Memories

- We are using Tailwind v4, so we do not need a Tailwind config anymore
- Always search the internet for Tailwind v4 documentation when making Tailwind-related style changes
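The mocking guidance above (mocks as nouns, cleared between tests) can be sketched with plain Vitest; the `notifier` collaborator and `warnOnLowDisk` function are hypothetical, used only for illustration:

```ts
import { beforeEach, describe, expect, it, vi } from 'vitest';

// A mock used as a "noun": a stand-in collaborator handed to the unit under test.
const notifier = { send: vi.fn() };

// Hypothetical unit under test.
function warnOnLowDisk(freeGb: number, deps: { send: (msg: string) => void }) {
  if (freeGb < 5) deps.send('low disk space');
}

describe('warnOnLowDisk', () => {
  beforeEach(() => {
    // Clear call history between tests to keep them isolated.
    vi.clearAllMocks();
  });

  it('notifies when space is low', () => {
    warnOnLowDisk(2, notifier);
    expect(notifier.send).toHaveBeenCalledTimes(1);
  });

  it('stays quiet when space is fine', () => {
    warnOnLowDisk(50, notifier);
    expect(notifier.send).not.toHaveBeenCalled();
  });
});
```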
10  api/.depcheckrc  Normal file
@@ -0,0 +1,10 @@
{
  "parsers": {
    "**/*.ts": [
      "@depcheck/parser-typescript",
      {
        "project": "tsconfig.json"
      }
    ]
  }
}
@@ -15,6 +15,9 @@ PATHS_ACTIVATION_BASE=./dev/activation
PATHS_PASSWD=./dev/passwd
PATHS_RCLONE_SOCKET=./dev/rclone-socket
PATHS_LOG_BASE=./dev/log # Where we store logs
PATHS_LOGS_FILE=./dev/log/graphql-api.log
PATHS_CONNECT_STATUS_FILE_PATH=./dev/connectStatus.json # Connect plugin status file
PATHS_OIDC_JSON=./dev/configs/oidc.local.json
ENVIRONMENT="development"
NODE_ENV="development"
PORT="3001"
@@ -13,5 +13,6 @@ PATHS_PARITY_CHECKS=./dev/states/parity-checks.log
PATHS_CONFIG_MODULES=./dev/configs
PATHS_ACTIVATION_BASE=./dev/activation
PATHS_PASSWD=./dev/passwd
PATHS_LOGS_FILE=./dev/log/graphql-api.log
PORT=5000
NODE_ENV="test"
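The `PATHS_OIDC_JSON` variable above drives a simple load-with-fallback pattern; this sketch is an assumption about how such a path override could be consumed, not the API's actual loader:

```ts
import { readFile } from 'node:fs/promises';

// Hypothetical loader: prefer the path from PATHS_OIDC_JSON,
// fall back to the default development config.
async function loadOidcConfig(): Promise<unknown> {
  const path = process.env.PATHS_OIDC_JSON ?? './dev/configs/oidc.json';
  const raw = await readFile(path, 'utf8');
  return JSON.parse(raw);
}

// Usage: PATHS_OIDC_JSON=./dev/configs/oidc.local.json pnpm dev
```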
105  api/.eslintrc.ts
@@ -4,54 +4,59 @@ import noRelativeImportPaths from 'eslint-plugin-no-relative-import-paths';
|
||||
import prettier from 'eslint-plugin-prettier';
|
||||
import tseslint from 'typescript-eslint';
|
||||
|
||||
export default tseslint.config(eslint.configs.recommended, ...tseslint.configs.recommended, {
|
||||
plugins: {
|
||||
'no-relative-import-paths': noRelativeImportPaths,
|
||||
prettier: prettier,
|
||||
import: importPlugin,
|
||||
export default tseslint.config(
|
||||
eslint.configs.recommended,
|
||||
...tseslint.configs.recommended,
|
||||
{
|
||||
ignores: ['src/graphql/generated/client/**/*', 'src/**/**/dummy-process.js'],
|
||||
},
|
||||
rules: {
|
||||
'@typescript-eslint/no-redundant-type-constituents': 'off',
|
||||
'@typescript-eslint/no-unsafe-call': 'off',
|
||||
'@typescript-eslint/naming-convention': 'off',
|
||||
'@typescript-eslint/no-unsafe-assignment': 'off',
|
||||
'@typescript-eslint/no-unsafe-return': 'off',
|
||||
'@typescript-eslint/ban-types': 'off',
|
||||
'@typescript-eslint/no-explicit-any': 'off',
|
||||
'@typescript-eslint/no-empty-object-type': 'off',
|
||||
'no-use-before-define': ['off'],
|
||||
'no-multiple-empty-lines': ['error', { max: 1, maxBOF: 0, maxEOF: 1 }],
|
||||
'@typescript-eslint/no-unused-vars': 'off',
|
||||
'@typescript-eslint/no-unused-expressions': 'off',
|
||||
'import/no-unresolved': 'off',
|
||||
'import/no-absolute-path': 'off',
|
||||
'import/prefer-default-export': 'off',
|
||||
'no-relative-import-paths/no-relative-import-paths': [
|
||||
'error',
|
||||
{ allowSameFolder: false, rootDir: 'src', prefix: '@app' },
|
||||
],
|
||||
'prettier/prettier': 'error',
|
||||
'import/extensions': [
|
||||
'error',
|
||||
'ignorePackages',
|
||||
{
|
||||
js: 'always',
|
||||
ts: 'always',
|
||||
},
|
||||
],
|
||||
'no-restricted-globals': [
|
||||
'error',
|
||||
{
|
||||
name: '__dirname',
|
||||
message: 'Use import.meta.url instead of __dirname in ESM',
|
||||
},
|
||||
{
|
||||
name: '__filename',
|
||||
message: 'Use import.meta.url instead of __filename in ESM',
|
||||
},
|
||||
],
|
||||
'eol-last': ['error', 'always'],
|
||||
},
|
||||
|
||||
ignores: ['src/graphql/generated/client/**/*'],
|
||||
});
|
||||
{
|
||||
plugins: {
|
||||
'no-relative-import-paths': noRelativeImportPaths,
|
||||
prettier: prettier,
|
||||
import: importPlugin,
|
||||
},
|
||||
rules: {
|
||||
'@typescript-eslint/no-redundant-type-constituents': 'off',
|
||||
'@typescript-eslint/no-unsafe-call': 'off',
|
||||
'@typescript-eslint/naming-convention': 'off',
|
||||
'@typescript-eslint/no-unsafe-assignment': 'off',
|
||||
'@typescript-eslint/no-unsafe-return': 'off',
|
||||
'@typescript-eslint/ban-types': 'off',
|
||||
'@typescript-eslint/no-explicit-any': 'off',
|
||||
'@typescript-eslint/no-empty-object-type': 'off',
|
||||
'no-use-before-define': ['off'],
|
||||
'no-multiple-empty-lines': ['error', { max: 1, maxBOF: 0, maxEOF: 1 }],
|
||||
'@typescript-eslint/no-unused-vars': 'off',
|
||||
'@typescript-eslint/no-unused-expressions': 'off',
|
||||
'import/no-unresolved': 'off',
|
||||
'import/no-absolute-path': 'off',
|
||||
'import/prefer-default-export': 'off',
|
||||
'no-relative-import-paths/no-relative-import-paths': [
|
||||
'error',
|
||||
{ allowSameFolder: false, rootDir: 'src', prefix: '@app' },
|
||||
],
|
||||
'prettier/prettier': 'error',
|
||||
'import/extensions': [
|
||||
'error',
|
||||
'ignorePackages',
|
||||
{
|
||||
js: 'always',
|
||||
ts: 'always',
|
||||
},
|
||||
],
|
||||
'no-restricted-globals': [
|
||||
'error',
|
||||
{
|
||||
name: '__dirname',
|
||||
message: 'Use import.meta.url instead of __dirname in ESM',
|
||||
},
|
||||
{
|
||||
name: '__filename',
|
||||
message: 'Use import.meta.url instead of __filename in ESM',
|
||||
},
|
||||
],
|
||||
'eol-last': ['error', 'always'],
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
9  api/.gitignore  vendored
@@ -82,3 +82,12 @@ deploy/*
.idea

!**/*.login.*

# local api configs - don't need project-wide tracking
dev/connectStatus.json
dev/configs/*
# local status - doesn't need to be tracked
dev/connectStatus.json

# local OIDC config for testing - contains secrets
dev/configs/oidc.local.json
9  api/.vscode/settings.json  vendored
@@ -1,9 +0,0 @@
{
  "eslint.lintTask.options": "--flag unstable_ts_config",
  "eslint.options": {
    "flags": ["unstable_ts_config"],
    "overrideConfigFile": ".eslintrc.ts"
  },
  "typescript.preferences.importModuleSpecifier": "non-relative",
  "javascript.preferences.importModuleSpecifier": "non-relative"
}
109  api/CHANGELOG.md
@@ -1,5 +1,114 @@
|
||||
# Changelog
|
||||
|
||||
## [4.13.1](https://github.com/unraid/api/compare/v4.13.0...v4.13.1) (2025-08-15)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* insecure routes not working for SSO ([#1587](https://github.com/unraid/api/issues/1587)) ([a4ff3c4](https://github.com/unraid/api/commit/a4ff3c40926915f6989ed4af679b30cf295ea15d))
|
||||
|
||||
## [4.13.0](https://github.com/unraid/api/compare/v4.12.0...v4.13.0) (2025-08-15)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* `createDockerFolder` & `setDockerFolderChildren` mutations ([#1558](https://github.com/unraid/api/issues/1558)) ([557b03f](https://github.com/unraid/api/commit/557b03f8829d3f179b5e26162fa250121cb33420))
|
||||
* `deleteDockerEntries` mutation ([#1564](https://github.com/unraid/api/issues/1564)) ([78997a0](https://github.com/unraid/api/commit/78997a02c6d96ec0ed75352dfc9849524147428c))
|
||||
* add `moveDockerEntriesToFolder` mutation ([#1569](https://github.com/unraid/api/issues/1569)) ([20c2d5b](https://github.com/unraid/api/commit/20c2d5b4457ad50d1e287fb3141aa98e8e7de665))
|
||||
* add docker -> organizer query ([#1555](https://github.com/unraid/api/issues/1555)) ([dfe352d](https://github.com/unraid/api/commit/dfe352dfa1bd6aa059cab56357ba6bff5e8ed7cb))
|
||||
* connect settings page updated for responsive webgui ([#1585](https://github.com/unraid/api/issues/1585)) ([96c120f](https://github.com/unraid/api/commit/96c120f9b24d3c91df5e9401917c8994eef36c46))
|
||||
* implement OIDC provider management in GraphQL API ([#1563](https://github.com/unraid/api/issues/1563)) ([979a267](https://github.com/unraid/api/commit/979a267bc5e128a8b789f0123e23c61860ebb11b))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* change config file loading error log to debug ([#1565](https://github.com/unraid/api/issues/1565)) ([3534d6f](https://github.com/unraid/api/commit/3534d6fdd7c59e65615167cfe306deebad9ca4d3))
|
||||
* **connect:** remove unraid-api folder before creating symlink ([#1556](https://github.com/unraid/api/issues/1556)) ([514a0ef](https://github.com/unraid/api/commit/514a0ef560a90595f774b6c0db60f1d2b4cd853c))
|
||||
* **deps:** pin dependencies ([#1586](https://github.com/unraid/api/issues/1586)) ([5721785](https://github.com/unraid/api/commit/57217852a337ead4c8c8e7596d1b7d590b64a26f))
|
||||
* **deps:** update all non-major dependencies ([#1543](https://github.com/unraid/api/issues/1543)) ([18b5209](https://github.com/unraid/api/commit/18b52090874c0ba86878d0f7e31bf0dc42734d75))
|
||||
* **deps:** update all non-major dependencies ([#1579](https://github.com/unraid/api/issues/1579)) ([ad6aa3b](https://github.com/unraid/api/commit/ad6aa3b6743aeeb42eff34d1c89ad874dfd0af09))
|
||||
* refactor API client to support Unix socket connections ([#1575](https://github.com/unraid/api/issues/1575)) ([a2c5d24](https://github.com/unraid/api/commit/a2c5d2495ffc02efa1ec5c63f0a1c5d23c9ed7ff))
|
||||
* **theme:** API key white text on white background ([#1584](https://github.com/unraid/api/issues/1584)) ([b321687](https://github.com/unraid/api/commit/b3216874faae208cdfc3edec719629fce428b6a3))
|
||||
|
||||
## [4.12.0](https://github.com/unraid/api/compare/v4.11.0...v4.12.0) (2025-07-30)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add ups monitoring to graphql api ([#1526](https://github.com/unraid/api/issues/1526)) ([6ea94f0](https://github.com/unraid/api/commit/6ea94f061d5b2e6c6fbfa6949006960501e3f4e7))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* enhance plugin management with interactive removal prompts ([#1549](https://github.com/unraid/api/issues/1549)) ([23ef760](https://github.com/unraid/api/commit/23ef760d763c525a38108048200fa73fc8531aed))
|
||||
* remove connect api plugin upon removal of Connect Unraid plugin ([#1548](https://github.com/unraid/api/issues/1548)) ([782d5eb](https://github.com/unraid/api/commit/782d5ebadc67854298f3b2355255983024d2a225))
|
||||
* SSO not being detected ([#1546](https://github.com/unraid/api/issues/1546)) ([6b3b951](https://github.com/unraid/api/commit/6b3b951d8288cd31d096252be544537dc2bfce50))
|
||||
|
||||
## [4.11.0](https://github.com/unraid/api/compare/v4.10.0...v4.11.0) (2025-07-28)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* tailwind v4 ([#1522](https://github.com/unraid/api/issues/1522)) ([2c62e0a](https://github.com/unraid/api/commit/2c62e0ad09c56d2293b76d07833dfb142c898937))
|
||||
* **web:** install and configure nuxt ui ([#1524](https://github.com/unraid/api/issues/1524)) ([407585c](https://github.com/unraid/api/commit/407585cd40c409175d8e7b861f8d61d8cabc11c9))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add missing breakpoints ([#1535](https://github.com/unraid/api/issues/1535)) ([f5352e3](https://github.com/unraid/api/commit/f5352e3a26a2766e85d19ffb5f74960c536b91b3))
|
||||
* border color incorrect in tailwind ([#1544](https://github.com/unraid/api/issues/1544)) ([f14b74a](https://github.com/unraid/api/commit/f14b74af91783b08640c0949c51ba7f18508f06f))
|
||||
* **connect:** omit extraneous fields during connect config validation ([#1538](https://github.com/unraid/api/issues/1538)) ([45bd736](https://github.com/unraid/api/commit/45bd73698b2bd534a8aff2c6ac73403de6c58561))
|
||||
* **deps:** pin dependencies ([#1528](https://github.com/unraid/api/issues/1528)) ([a74d935](https://github.com/unraid/api/commit/a74d935b566dd7af1a21824c9b7ab562232f9d8b))
|
||||
* **deps:** pin dependency @nuxt/ui to 3.2.0 ([#1532](https://github.com/unraid/api/issues/1532)) ([8279531](https://github.com/unraid/api/commit/8279531f2b86a78e81a77e6c037a0fb752e98062))
|
||||
* **deps:** update all non-major dependencies ([#1510](https://github.com/unraid/api/issues/1510)) ([1a8da6d](https://github.com/unraid/api/commit/1a8da6d92b96d3afa2a8b42446b36f1ee98b64a0))
|
||||
* **deps:** update all non-major dependencies ([#1520](https://github.com/unraid/api/issues/1520)) ([e2fa648](https://github.com/unraid/api/commit/e2fa648d1cf5a6cbe3e55c3f52c203d26bb4d526))
|
||||
* inject Tailwind CSS into client entry point ([#1537](https://github.com/unraid/api/issues/1537)) ([86b6c4f](https://github.com/unraid/api/commit/86b6c4f85b7b30bb4a13d57450a76bf4c28a3fff))
|
||||
* make settings grid responsive ([#1463](https://github.com/unraid/api/issues/1463)) ([9dfdb8d](https://github.com/unraid/api/commit/9dfdb8dce781fa662d6434ee432e4521f905ffa5))
|
||||
* **notifications:** gracefully handle & mask invalid notifications ([#1529](https://github.com/unraid/api/issues/1529)) ([05056e7](https://github.com/unraid/api/commit/05056e7ca1702eb7bf6c507950460b6b15bf7916))
|
||||
* truncate log files when they take up more than 5mb of space ([#1530](https://github.com/unraid/api/issues/1530)) ([0a18b38](https://github.com/unraid/api/commit/0a18b38008dd86a125cde7f684636d5dbb36f082))
|
||||
* use async for primary file read/writes ([#1531](https://github.com/unraid/api/issues/1531)) ([23b2b88](https://github.com/unraid/api/commit/23b2b8846158a27d1c9808bce0cc1506779c4dc3))
|
||||
|
||||
## [4.10.0](https://github.com/unraid/api/compare/v4.9.5...v4.10.0) (2025-07-15)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* trial extension allowed within 5 days of expiration ([#1490](https://github.com/unraid/api/issues/1490)) ([f34a33b](https://github.com/unraid/api/commit/f34a33bc9f1a7e135d453d9d31888789bfc3f878))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* delay `nginx:reload` file mod effect by 10 seconds ([#1512](https://github.com/unraid/api/issues/1512)) ([af33e99](https://github.com/unraid/api/commit/af33e999a0480a77e3e6b2aa833b17b38b835656))
|
||||
* **deps:** update all non-major dependencies ([#1489](https://github.com/unraid/api/issues/1489)) ([53b05eb](https://github.com/unraid/api/commit/53b05ebe5e2050cb0916fcd65e8d41370aee0624))
|
||||
* ensure no crash if emhttp state configs are missing ([#1514](https://github.com/unraid/api/issues/1514)) ([1a7d35d](https://github.com/unraid/api/commit/1a7d35d3f6972fd8aff58c17b2b0fb79725e660e))
|
||||
* **my.servers:** improve DNS resolution robustness for backup server ([#1518](https://github.com/unraid/api/issues/1518)) ([eecd9b1](https://github.com/unraid/api/commit/eecd9b1017a63651d1dc782feaa224111cdee8b6))
|
||||
* over-eager cloud query from web components ([#1506](https://github.com/unraid/api/issues/1506)) ([074370c](https://github.com/unraid/api/commit/074370c42cdecc4dbc58193ff518aa25735c56b3))
|
||||
* replace myservers.cfg reads in UpdateFlashBackup.php ([#1517](https://github.com/unraid/api/issues/1517)) ([441e180](https://github.com/unraid/api/commit/441e1805c108a6c1cd35ee093246b975a03f8474))
|
||||
* rm short-circuit in `rc.unraid-api` if plugin config dir is absent ([#1515](https://github.com/unraid/api/issues/1515)) ([29dcb7d](https://github.com/unraid/api/commit/29dcb7d0f088937cefc5158055f48680e86e5c36))
|
||||
|
||||
## [4.9.5](https://github.com/unraid/api/compare/v4.9.4...v4.9.5) (2025-07-10)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **connect:** rm eager restart on `ERROR_RETYING` connection status ([#1502](https://github.com/unraid/api/issues/1502)) ([dd759d9](https://github.com/unraid/api/commit/dd759d9f0f841b296f8083bc67c6cd3f7a69aa5b))
|
||||
|
||||
## [4.9.4](https://github.com/unraid/api/compare/v4.9.3...v4.9.4) (2025-07-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* backport `<unraid-modals>` upon plg install when necessary ([#1499](https://github.com/unraid/api/issues/1499)) ([33e0b1a](https://github.com/unraid/api/commit/33e0b1ab24bedb6a2c7b376ea73dbe65bc3044be))
|
||||
* DefaultPageLayout patch rollback omits legacy header logo ([#1497](https://github.com/unraid/api/issues/1497)) ([ea20d1e](https://github.com/unraid/api/commit/ea20d1e2116fcafa154090fee78b42ec5d9ba584))
|
||||
* event emitter setup for writing status ([#1496](https://github.com/unraid/api/issues/1496)) ([ca4e2db](https://github.com/unraid/api/commit/ca4e2db1f29126a1fa3784af563832edda64b0ca))
|
||||
|
||||
## [4.9.3](https://github.com/unraid/api/compare/v4.9.2...v4.9.3) (2025-07-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* duplicated header logo after api stops ([#1493](https://github.com/unraid/api/issues/1493)) ([4168f43](https://github.com/unraid/api/commit/4168f43e3ecd51479bec3aae585abbe6dcd3e416))
|
||||
|
||||
## [4.9.2](https://github.com/unraid/api/compare/v4.9.1...v4.9.2) (2025-07-09)
|
||||
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
###########################################################
# Development/Build Image
###########################################################
-FROM node:22.17.0-bookworm-slim AS development
+FROM node:22.18.0-bookworm-slim AS development

# Install build tools and dependencies
RUN apt-get update -y && apt-get install -y \

@@ -27,19 +27,13 @@ const config: CodegenConfig = {
        },
    },
    generates: {
        // Generate Types for Mothership GraphQL Client
        'src/graphql/generated/client/': {
            documents: './src/graphql/mothership/*.ts',
            schema: {
                [process.env.MOTHERSHIP_GRAPHQL_LINK as string]: {
                    headers: {
                        origin: 'https://forums.unraid.net',
                    },
                },
            },
        // Generate Types for CLI Internal GraphQL Queries
        'src/unraid-api/cli/generated/': {
            documents: ['src/unraid-api/cli/queries/**/*.ts', 'src/unraid-api/cli/mutations/**/*.ts'],
            schema: './generated-schema.graphql',
            preset: 'client',
            presetConfig: {
-               gqlTagName: 'graphql',
+               gqlTagName: 'gql',
            },
            config: {
                useTypeImports: true,
@@ -47,21 +41,6 @@ const config: CodegenConfig = {
            },
            plugins: [{ add: { content: '/* eslint-disable */' } }],
        },
        'src/graphql/generated/client/validators.ts': {
            schema: {
                [process.env.MOTHERSHIP_GRAPHQL_LINK as string]: {
                    headers: {
                        origin: 'https://forums.unraid.net',
                    },
                },
            },
            plugins: ['typescript-validation-schema', { add: { content: '/* eslint-disable */' } }],
            config: {
                importFrom: '@app/graphql/generated/client/graphql.js',
                strictScalars: false,
                schema: 'zod',
            },
        },
    },
};
api/dev/configs/README.md (new file, 34 lines)
@@ -0,0 +1,34 @@
|
||||
# Development Configuration Files

This directory contains configuration files for local development.

## OIDC Configuration

### oidc.json

The default OIDC configuration file. This file is committed to git and should only contain non-sensitive test configurations.

### Using a Local Configuration (gitignored)

For local testing with real OAuth providers:

1. Create an `oidc.local.json` file based on `oidc.json`
2. Set the environment variable: `PATHS_OIDC_JSON=./dev/configs/oidc.local.json`
3. The API will load your local configuration instead of the default

Example:

```bash
PATHS_OIDC_JSON=./dev/configs/oidc.local.json pnpm dev
```

### Setting up OAuth Apps

#### Google

1. Go to [Google Cloud Console](https://console.cloud.google.com/)
2. Create a new project or select existing
3. Enable Google+ API
4. Create OAuth 2.0 credentials
5. Add authorized redirect URI: `http://localhost:3000/graphql/api/auth/oidc/callback`

#### GitHub

1. Go to GitHub Settings > Developer settings > OAuth Apps
2. Create a new OAuth App
3. Set Authorization callback URL: `http://localhost:3000/graphql/api/auth/oidc/callback`
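As a minimal sketch of the `PATHS_OIDC_JSON` override described above, the snippet below resolves the config path from the environment with a fallback to the committed default. The helper name and the exact fallback behavior are illustrative assumptions, not the API's actual loader:

```ts
import { readFile } from 'node:fs/promises';
import { resolve } from 'node:path';

// Hypothetical helper; the real loader inside the API may differ.
const DEFAULT_OIDC_CONFIG = './dev/configs/oidc.json';

export async function loadOidcConfig(): Promise<unknown> {
    // PATHS_OIDC_JSON points at a gitignored oidc.local.json when set.
    const configPath = resolve(process.env.PATHS_OIDC_JSON ?? DEFAULT_OIDC_CONFIG);
    const raw = await readFile(configPath, 'utf8');
    return JSON.parse(raw);
}
```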
|
||||
@@ -1,10 +1,9 @@
|
||||
{
|
||||
"version": "4.8.0",
|
||||
"extraOrigins": [
|
||||
"https://google.com",
|
||||
"https://test.com"
|
||||
],
|
||||
"version": "4.12.0",
|
||||
"extraOrigins": [],
|
||||
"sandbox": true,
|
||||
"ssoSubIds": [],
|
||||
"plugins": ["unraid-api-plugin-connect"]
|
||||
"plugins": [
|
||||
"unraid-api-plugin-connect"
|
||||
]
|
||||
}
|
||||
@@ -1,16 +1,12 @@
|
||||
{
|
||||
"wanaccess": false,
|
||||
"wanport": 0,
|
||||
"wanaccess": true,
|
||||
"wanport": 8443,
|
||||
"upnpEnabled": false,
|
||||
"apikey": "",
|
||||
"localApiKey": "",
|
||||
"email": "",
|
||||
"username": "",
|
||||
"avatar": "",
|
||||
"regWizTime": "",
|
||||
"accesstoken": "",
|
||||
"idtoken": "",
|
||||
"refreshtoken": "",
|
||||
"dynamicRemoteAccessType": "DISABLED",
|
||||
"ssoSubIds": []
|
||||
"localApiKey": "_______________________LOCAL_API_KEY_HERE_________________________",
|
||||
"email": "test@example.com",
|
||||
"username": "zspearmint",
|
||||
"avatar": "https://via.placeholder.com/200",
|
||||
"regWizTime": "1611175408732_0951-1653-3509-FBA155FA23C0",
|
||||
"dynamicRemoteAccessType": "STATIC"
|
||||
}
|
||||
api/dev/configs/oidc.json (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"providers": [
|
||||
{
|
||||
"id": "unraid.net",
|
||||
"name": "Unraid.net",
|
||||
"clientId": "CONNECT_SERVER_SSO",
|
||||
"issuer": "https://account.unraid.net",
|
||||
"authorizationEndpoint": "https://account.unraid.net/sso/",
|
||||
"tokenEndpoint": "https://account.unraid.net/api/oauth2/token",
|
||||
"scopes": [
|
||||
"openid",
|
||||
"profile",
|
||||
"email"
|
||||
],
|
||||
"authorizedSubIds": [
|
||||
"297294e2-b31c-4bcc-a441-88aee0ad609f"
|
||||
],
|
||||
"buttonText": "Login With Unraid.net"
|
||||
}
|
||||
]
|
||||
}
|
||||
api/dev/keys/fc91da7b-0284-46f4-9018-55aa9759fba9.json (new file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"createdAt": "2025-07-23T17:34:06.301Z",
|
||||
"description": "Internal admin API key used by CLI commands for system operations",
|
||||
"id": "fc91da7b-0284-46f4-9018-55aa9759fba9",
|
||||
"key": "_______SUPER_SECRET_KEY_______",
|
||||
"name": "CliInternal",
|
||||
"permissions": [],
|
||||
"roles": [
|
||||
"ADMIN"
|
||||
]
|
||||
}
|
||||
api/dev/log/.gitkeep (new file, 1 line)
@@ -0,0 +1 @@
|
||||
# custom log directory for tests & development
|
||||
@@ -62,10 +62,17 @@ Switch between production and staging environments.
|
||||
### Developer Mode
|
||||
|
||||
```bash
|
||||
unraid-api developer
|
||||
unraid-api developer # Interactive prompt for tools
|
||||
unraid-api developer --sandbox true # Enable GraphQL sandbox
|
||||
unraid-api developer --sandbox false # Disable GraphQL sandbox
|
||||
unraid-api developer --enable-modal # Enable modal testing tool
|
||||
unraid-api developer --disable-modal # Disable modal testing tool
|
||||
```
|
||||
|
||||
Configure developer features for the API (e.g., GraphQL sandbox).
|
||||
Configure developer features for the API:
|
||||
|
||||
- **GraphQL Sandbox**: Enable/disable Apollo GraphQL sandbox at `/graphql`
|
||||
- **Modal Testing Tool**: Enable/disable UI modal testing in the Unraid menu
|
||||
|
||||
## API Key Management
|
||||
|
||||
|
||||
@@ -4,13 +4,19 @@ The Unraid API provides a GraphQL interface that allows you to interact with you
|
||||
|
||||
## Enabling the GraphQL Sandbox
|
||||
|
||||
1. First, enable developer mode using the CLI:
|
||||
1. Enable developer mode using the CLI:
|
||||
|
||||
```bash
|
||||
unraid-api developer --sandbox true
|
||||
```
|
||||
|
||||
Or use the interactive mode:
|
||||
|
||||
```bash
|
||||
unraid-api developer
|
||||
```
|
||||
|
||||
2. Follow the prompts to enable the sandbox. This will allow you to access the Apollo Sandbox interface.
|
||||
2. Once enabled, you can access the Apollo Sandbox interface.
|
||||
|
||||
3. Access the GraphQL playground by navigating to:
|
||||
|
||||
|
||||
New binary images added under `api/docs/public/images/`: advanced-rules.png (101 KiB), button-customization.png (96 KiB), configured-provider.png (85 KiB), default-unraid-provider.png (128 KiB), sso-with-options.png (75 KiB). Binary files not shown.
@@ -22,6 +22,7 @@ The API will be integrated directly into the Unraid operating system in an upcom
|
||||
|
||||
- [CLI Commands](./cli.md) - Reference for all available command-line interface commands
|
||||
- [Using the Unraid API](./how-to-use-the-api.md) - Comprehensive guide on using the GraphQL API
|
||||
- [OIDC Provider Setup](./oidc-provider-setup.md) - OIDC SSO provider configuration examples
|
||||
- [Upcoming Features](./upcoming-features.md) - Roadmap of planned features and improvements
|
||||
|
||||
## Key Features
|
||||
|
||||
api/docs/public/oidc-provider-setup.md (new file, 402 lines)
@@ -0,0 +1,402 @@
|
||||
---
|
||||
title: OIDC Provider Setup
|
||||
description: Configure OIDC (OpenID Connect) providers for SSO authentication in Unraid API
|
||||
sidebar_position: 3
|
||||
---
|
||||
|
||||
# OIDC Provider Setup
|
||||
|
||||
This guide walks you through configuring OIDC (OpenID Connect) providers for SSO authentication in the Unraid API using the web interface.
|
||||
|
||||
## Accessing OIDC Settings
|
||||
|
||||
1. Navigate to your Unraid server's web interface
|
||||
2. The OIDC Providers section is available on the main configuration page
|
||||
3. You'll see tabs for different providers - click the **+** button to add a new provider
|
||||
|
||||
### OIDC Providers Interface Overview
|
||||
|
||||

|
||||
_Screenshot: Login page showing traditional login form with SSO options - "Login With Unraid.net" and "Sign in with Google" buttons_
|
||||
|
||||
The interface includes:
|
||||
|
||||
- **Provider tabs**: Each configured provider (Unraid.net, Google, etc.) appears as a tab
|
||||
- **Add Provider button**: Click the **+** button to add new providers
|
||||
- **Authorization Mode dropdown**: Toggle between "simple" and "advanced" modes
|
||||
- **Simple Authorization section**: Configure allowed email domains and specific addresses
|
||||
- **Add Item buttons**: Click to add multiple authorization rules
|
||||
|
||||
## Understanding Authorization Modes
|
||||
|
||||
The interface provides two authorization modes:
|
||||
|
||||
### Simple Mode (Recommended)
|
||||
|
||||
Simple mode is the easiest way to configure authorization. You can:
|
||||
|
||||
- Allow specific email domains (e.g., @company.com)
|
||||
- Allow specific email addresses
|
||||
- Configure who can access your Unraid server with minimal setup
|
||||
|
||||
**When to use Simple Mode:**
|
||||
|
||||
- You want to allow all users from your company domain
|
||||
- You have a small list of specific users
|
||||
- You're new to OIDC configuration
|
||||
|
||||
<details>
|
||||
<summary><strong>Advanced Mode</strong></summary>
|
||||
|
||||
Advanced mode provides granular control using claim-based rules. You can:
|
||||
|
||||
- Create complex authorization rules based on JWT claims
|
||||
- Use operators like equals, contains, endsWith, startsWith
|
||||
- Combine multiple conditions with OR/AND logic
|
||||
- Choose whether ANY rule must pass (OR mode) or ALL rules must pass (AND mode)
|
||||
|
||||
**When to use Advanced Mode:**
|
||||
|
||||
- You need to check group memberships
|
||||
- You want to verify multiple claims (e.g., email domain AND verified status)
|
||||
- You have complex authorization requirements
|
||||
- You need fine-grained control over how rules are evaluated
|
||||
|
||||
</details>
|
||||
|
||||
## Authorization Rules
|
||||
|
||||

|
||||
_Screenshot: Advanced authorization rules showing JWT claim configuration with email endsWith operator for domain-based access control_
|
||||
|
||||
### Simple Mode Examples
|
||||
|
||||
#### Allow Company Domain
|
||||
|
||||
In Simple Authorization:
|
||||
|
||||
- **Allowed Email Domains**: Enter `company.com`
|
||||
- This allows anyone with @company.com email
|
||||
|
||||
#### Allow Specific Users
|
||||
|
||||
- **Specific Email Addresses**: Add individual emails
|
||||
- Click **Add Item** to add multiple addresses
|
||||
|
||||
<details>
|
||||
<summary><strong>Advanced Mode Examples</strong></summary>
|
||||
|
||||
#### Authorization Rule Mode
|
||||
|
||||
When using multiple rules, you can choose how they're evaluated:
|
||||
|
||||
- **OR Mode** (default): User is authorized if ANY rule passes
|
||||
- **AND Mode**: User is authorized only if ALL rules pass
|
||||
|
||||
#### Email Domain with Verification (AND Mode)
|
||||
|
||||
To require both email domain AND verification:
|
||||
|
||||
1. Set **Authorization Rule Mode** to `AND`
|
||||
2. Add two rules:
|
||||
- Rule 1:
|
||||
- **Claim**: `email`
|
||||
- **Operator**: `endsWith`
|
||||
- **Value**: `@company.com`
|
||||
- Rule 2:
|
||||
- **Claim**: `email_verified`
|
||||
- **Operator**: `equals`
|
||||
- **Value**: `true`
|
||||
|
||||
This ensures users must have both a company email AND a verified email address.
|
||||
|
||||
#### Group-Based Access (OR Mode)
|
||||
|
||||
To allow access to multiple groups:
|
||||
|
||||
1. Set **Authorization Rule Mode** to `OR` (default)
|
||||
2. Add rules for each group:
|
||||
- **Claim**: `groups`
|
||||
- **Operator**: `contains`
|
||||
- **Value**: `admins`
|
||||
|
||||
Or add another rule:
|
||||
- **Claim**: `groups`
|
||||
- **Operator**: `contains`
|
||||
- **Value**: `developers`
|
||||
|
||||
Users in either `admins` OR `developers` group will be authorized.
|
||||
|
||||
#### Multiple Domains
|
||||
|
||||
- **Claim**: `email`
|
||||
- **Operator**: `endsWith`
|
||||
- **Values**: Add multiple domains (e.g., `company.com`, `subsidiary.com`)
|
||||
|
||||
#### Complex Authorization (AND Mode)
|
||||
|
||||
For strict security requiring multiple conditions:
|
||||
|
||||
1. Set **Authorization Rule Mode** to `AND`
|
||||
2. Add multiple rules that ALL must pass:
|
||||
- Email must be from company domain
|
||||
- Email must be verified
|
||||
- User must be in specific group
|
||||
- Account must have 2FA enabled (if claim available)
|
||||
|
||||
</details>
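To make the rule semantics above concrete, here is a minimal evaluation sketch that follows the operators and the OR/AND modes as this guide describes them. It is an illustration only; the API's actual implementation may differ in detail:

```ts
type Operator = 'equals' | 'contains' | 'endsWith' | 'startsWith';

interface AuthorizationRule {
    claim: string;      // e.g. 'email', 'email_verified', 'groups'
    operator: Operator;
    value: string[];    // the rule passes if ANY listed value matches
}

type Claims = Record<string, unknown>;

const OPERATORS: Record<Operator, (actual: string, expected: string) => boolean> = {
    equals: (a, e) => a === e,
    contains: (a, e) => a.includes(e),
    endsWith: (a, e) => a.endsWith(e),
    startsWith: (a, e) => a.startsWith(e),
};

function matches(claimValue: unknown, operator: Operator, expected: string): boolean {
    // Array claims (e.g. groups) pass if any element matches.
    if (Array.isArray(claimValue)) {
        return claimValue.some((v) => matches(v, operator, expected));
    }
    return OPERATORS[operator](String(claimValue), expected);
}

export function isAuthorized(
    claims: Claims,
    rules: AuthorizationRule[],
    mode: 'OR' | 'AND' = 'OR',
): boolean {
    const passes = (rule: AuthorizationRule) =>
        rule.value.some((expected) => matches(claims[rule.claim], rule.operator, expected));
    return mode === 'AND' ? rules.every(passes) : rules.some(passes);
}

// Example from the guide: company email AND verified address (AND mode).
isAuthorized(
    { email: 'alice@company.com', email_verified: true },
    [
        { claim: 'email', operator: 'endsWith', value: ['@company.com'] },
        { claim: 'email_verified', operator: 'equals', value: ['true'] },
    ],
    'AND',
); // => true
```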
|
||||
|
||||
<details>
|
||||
<summary><strong>Configuration Interface Details</strong></summary>
|
||||
|
||||
### Provider Tabs
|
||||
|
||||
- Each configured provider appears as a tab at the top
|
||||
- Click a tab to switch between provider configurations
|
||||
- The **+** button on the right adds a new provider
|
||||
|
||||
### Authorization Mode Dropdown
|
||||
|
||||
- **simple**: Best for email-based authorization (recommended for most users)
|
||||
- **advanced**: For complex claim-based rules using JWT claims
|
||||
|
||||
### Simple Authorization Fields
|
||||
|
||||
When "simple" mode is selected, you'll see:
|
||||
|
||||
- **Allowed Email Domains**: Enter domains without @ (e.g., `company.com`)
|
||||
- Helper text: "Users with emails ending in these domains can login"
|
||||
- **Specific Email Addresses**: Add individual email addresses
|
||||
- Helper text: "Only these exact email addresses can login"
|
||||
- **Add Item** buttons to add multiple entries
|
||||
|
||||
### Advanced Authorization Fields
|
||||
|
||||
When "advanced" mode is selected, you'll see:
|
||||
|
||||
- **Authorization Rule Mode**: Choose `OR` (any rule passes) or `AND` (all rules must pass)
|
||||
- **Authorization Rules**: Add multiple claim-based rules
|
||||
- **For each rule**:
|
||||
- **Claim**: The JWT claim to check
|
||||
- **Operator**: How to compare (equals, contains, endsWith, startsWith)
|
||||
- **Value**: What to match against
|
||||
|
||||
### Additional Interface Elements
|
||||
|
||||
- **Enable Developer Sandbox**: Toggle to enable GraphQL sandbox at `/graphql`
|
||||
- The interface uses a dark theme for better visibility
|
||||
- Field validation indicators help ensure correct configuration
|
||||
|
||||
</details>
|
||||
|
||||
### Required Redirect URI
|
||||
|
||||
All providers must be configured with this redirect URI:
|
||||
|
||||
```
|
||||
http://YOUR_UNRAID_IP:3001/graphql/api/auth/oidc/callback
|
||||
```
|
||||
|
||||
Replace `YOUR_UNRAID_IP` with your actual server IP address.
|
||||
|
||||
### Issuer URL Format
|
||||
|
||||
The **Issuer URL** field accepts both formats, but **base URL is strongly recommended** for security:
|
||||
|
||||
- **Base URL** (recommended): `https://accounts.google.com`
|
||||
- **Full discovery URL**: `https://accounts.google.com/.well-known/openid-configuration`
|
||||
|
||||
**⚠️ Security Note**: Always use the base URL format when possible. The system automatically appends `/.well-known/openid-configuration` for OIDC discovery. Using the full discovery URL directly disables important issuer validation checks and is not recommended by the OpenID Connect specification.
|
||||
|
||||
**Examples of correct base URLs:**
|
||||
- Google: `https://accounts.google.com`
|
||||
- Microsoft/Azure: `https://login.microsoftonline.com/YOUR_TENANT_ID/v2.0`
|
||||
- Keycloak: `https://keycloak.example.com/realms/YOUR_REALM`
|
||||
- Authelia: `https://auth.yourdomain.com`
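The following sketch shows the discovery behavior described above, appending the well-known path to a base issuer URL. The helper is an assumption for illustration, not the API's exact code:

```ts
// Builds the OIDC discovery document URL from an issuer value.
// A full discovery URL is passed through unchanged, but prefer the base
// issuer URL for the security reasons noted above.
export function discoveryUrl(issuer: string): string {
    const wellKnown = '/.well-known/openid-configuration';
    if (issuer.endsWith(wellKnown)) {
        return issuer;
    }
    return issuer.replace(/\/+$/, '') + wellKnown;
}

discoveryUrl('https://accounts.google.com');
// => 'https://accounts.google.com/.well-known/openid-configuration'
```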
|
||||
|
||||
## Testing Your Configuration
|
||||
|
||||

|
||||
_Screenshot: Unraid login page displaying both traditional username/password authentication and SSO options with customized provider buttons_
|
||||
|
||||
1. Save your provider configuration
|
||||
2. Log out (if logged in)
|
||||
3. Navigate to the login page
|
||||
4. Your configured provider button should appear
|
||||
5. Click to test the login flow
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
#### "Provider not found" error
|
||||
|
||||
- Ensure the Issuer URL is correct
|
||||
- Check that the provider supports OIDC discovery (/.well-known/openid-configuration)
|
||||
|
||||
#### "Authorization failed"
|
||||
|
||||
- In Simple Mode: Check email domains are entered correctly (without @)
|
||||
- In Advanced Mode:
|
||||
- Verify claim names match exactly what your provider sends
|
||||
- Check if Authorization Rule Mode is set correctly (OR vs AND)
|
||||
- Ensure all required claims are present in the token
|
||||
- Enable debug logging to see actual claims and rule evaluation
|
||||
|
||||
#### "Invalid redirect URI"
|
||||
|
||||
- Ensure the redirect URI in your provider matches exactly
|
||||
- Include the port number (:3001)
|
||||
- Use HTTP for local, HTTPS for production
|
||||
|
||||
#### Cannot see login button
|
||||
|
||||
- Check that at least one authorization rule is configured
|
||||
- Verify the provider is enabled/saved
|
||||
|
||||
### Debug Mode
|
||||
|
||||
To troubleshoot issues:
|
||||
|
||||
1. Enable debug logging:
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=debug unraid-api start --debug
|
||||
```
|
||||
|
||||
2. Check logs for:
|
||||
|
||||
- Received claims from provider
|
||||
- Authorization rule evaluation
|
||||
- Token validation errors
|
||||
|
||||
## Security Best Practices
|
||||
|
||||
1. **Always use HTTPS in production** - OAuth requires secure connections
|
||||
2. **Use Simple Mode for authorization** - Simple mode prevents overly permissive configurations and reduces the risk of misconfiguration
|
||||
3. **Be specific with authorization** - Don't use overly broad rules
|
||||
4. **Rotate secrets regularly** - Update client secrets periodically
|
||||
5. **Test thoroughly** - Verify only intended users can access
|
||||
|
||||
## Need Help?
|
||||
|
||||
- Check provider's OIDC documentation
|
||||
- Review Unraid API logs for detailed error messages
|
||||
- Ensure your provider supports standard OIDC discovery
|
||||
- Verify network connectivity between Unraid and provider
|
||||
|
||||
## Provider-Specific Setup
|
||||
|
||||
### Unraid.net Provider
|
||||
|
||||
The Unraid.net provider is built-in and pre-configured. You only need to configure authorization rules in the interface.
|
||||
|
||||
**Configuration:**
|
||||
- **Issuer URL**: Pre-configured (built-in provider)
|
||||
- **Client ID/Secret**: Pre-configured (built-in provider)
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP:3001/graphql/api/auth/oidc/callback`
|
||||
|
||||
:::warning[Security Notice]
|
||||
**Always use HTTPS for production redirect URIs!** The examples above use HTTP for initial setup and testing only. In production environments, you MUST use HTTPS (e.g., `https://YOUR_UNRAID_IP:3001/graphql/api/auth/oidc/callback`) to ensure secure communication and prevent credential interception. Most OIDC providers will reject HTTP redirect URIs for security reasons.
|
||||
:::
|
||||
|
||||
Configure authorization rules using Simple Mode (allowed email domains/addresses) or Advanced Mode for complex requirements.
|
||||
|
||||
### Google
|
||||
|
||||
Set up OAuth 2.0 credentials in [Google Cloud Console](https://console.cloud.google.com/):
|
||||
|
||||
1. Go to **APIs & Services** → **Credentials**
|
||||
2. Click **Create Credentials** → **OAuth client ID**
|
||||
3. Choose **Web application** as the application type
|
||||
4. Add your redirect URI to **Authorized redirect URIs**
|
||||
5. Configure the OAuth consent screen if prompted
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://accounts.google.com`
|
||||
- **Client ID/Secret**: From your OAuth 2.0 client credentials
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP:3001/graphql/api/auth/oidc/callback`
|
||||
|
||||
:::warning[Google Domain Requirements]
|
||||
**Google requires valid domain names for OAuth redirect URIs.** Local IP addresses and `.local` domains are not accepted. To use Google OAuth with your Unraid server, you'll need:
|
||||
|
||||
- **Option 1: Reverse Proxy** - Set up a reverse proxy (like NGINX Proxy Manager or Traefik) with a valid domain name pointing to your Unraid API
|
||||
- **Option 2: Tailscale** - Use Tailscale to get a valid `*.ts.net` domain that Google will accept
|
||||
- **Option 3: Dynamic DNS** - Use a DDNS service to get a public domain name for your server
|
||||
|
||||
Remember to update your redirect URI in both Google Cloud Console and your Unraid OIDC configuration to use the valid domain.
|
||||
:::
|
||||
|
||||
For Google Workspace domains, use Advanced Mode with the `hd` claim to restrict access to your organization's domain.
|
||||
|
||||
### Authelia
|
||||
|
||||
Configure OIDC client in your Authelia `configuration.yml` with client ID `unraid-api` and generate a hashed secret using the Authelia hash-password command.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://auth.yourdomain.com`
|
||||
- **Client ID**: `unraid-api` (or as configured in Authelia)
|
||||
- **Client Secret**: The plaintext (unhashed) secret that corresponds to the hashed value configured in Authelia
|
||||
- **Required Scopes**: `openid`, `profile`, `email`, `groups`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP:3001/graphql/api/auth/oidc/callback`
|
||||
|
||||
Use Advanced Mode with `groups` claim for group-based authorization.
|
||||
|
||||
### Microsoft/Azure AD
|
||||
|
||||
Register a new app in [Azure Portal](https://portal.azure.com/) under Azure Active Directory → App registrations. Note the Application ID, create a client secret, and note your tenant ID.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://login.microsoftonline.com/YOUR_TENANT_ID/v2.0`
|
||||
- **Client ID**: Your Application (client) ID
|
||||
- **Client Secret**: Generated client secret
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP:3001/graphql/api/auth/oidc/callback`
|
||||
|
||||
Authorization rules can be configured in the interface using email domains or advanced claims.
|
||||
|
||||
### Keycloak
|
||||
|
||||
Create a new confidential client in Keycloak Admin Console with `openid-connect` protocol and copy the client secret from the Credentials tab.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://keycloak.example.com/realms/YOUR_REALM`
|
||||
- **Client ID**: `unraid-api` (or as configured in Keycloak)
|
||||
- **Client Secret**: From Keycloak Credentials tab
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP:3001/graphql/api/auth/oidc/callback`
|
||||
|
||||
For role-based authorization, use Advanced Mode with `realm_access.roles` or `resource_access` claims.
|
||||
|
||||
### Authentik
|
||||
|
||||
Create a new OAuth2/OpenID Provider in Authentik, then create an Application and link it to the provider.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://authentik.example.com/application/o/unraid-api/`
|
||||
- **Client ID**: From Authentik provider configuration
|
||||
- **Client Secret**: From Authentik provider configuration
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP:3001/graphql/api/auth/oidc/callback`
|
||||
|
||||
Authorization rules can be configured in the interface.
|
||||
|
||||
### Okta
|
||||
|
||||
Create a new OIDC Web Application in Okta Admin Console and assign appropriate users or groups.
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Issuer URL**: `https://YOUR_DOMAIN.okta.com`
|
||||
- **Client ID**: From Okta application configuration
|
||||
- **Client Secret**: From Okta application configuration
|
||||
- **Required Scopes**: `openid`, `profile`, `email`
|
||||
- **Redirect URI**: `http://YOUR_UNRAID_IP:3001/graphql/api/auth/oidc/callback`
|
||||
|
||||
Authorization rules can be configured in the interface using email domains or advanced claims.
|
||||
@@ -226,27 +226,6 @@ type Share implements Node {
|
||||
luksStatus: String
|
||||
}
|
||||
|
||||
type AccessUrl {
|
||||
type: URL_TYPE!
|
||||
name: String
|
||||
ipv4: URL
|
||||
ipv6: URL
|
||||
}
|
||||
|
||||
enum URL_TYPE {
|
||||
LAN
|
||||
WIREGUARD
|
||||
WAN
|
||||
MDNS
|
||||
OTHER
|
||||
DEFAULT
|
||||
}
|
||||
|
||||
"""
|
||||
A field whose value conforms to the standard URL format as specified in RFC3986: https://www.ietf.org/rfc/rfc3986.txt.
|
||||
"""
|
||||
scalar URL
|
||||
|
||||
type DiskPartition {
|
||||
"""The name of the partition"""
|
||||
name: String!
|
||||
@@ -1246,6 +1225,42 @@ type Docker implements Node {
|
||||
id: PrefixedID!
|
||||
containers(skipCache: Boolean! = false): [DockerContainer!]!
|
||||
networks(skipCache: Boolean! = false): [DockerNetwork!]!
|
||||
organizer: ResolvedOrganizerV1!
|
||||
}
|
||||
|
||||
type ResolvedOrganizerView {
|
||||
id: String!
|
||||
name: String!
|
||||
root: ResolvedOrganizerEntry!
|
||||
prefs: JSON
|
||||
}
|
||||
|
||||
union ResolvedOrganizerEntry = ResolvedOrganizerFolder | OrganizerContainerResource | OrganizerResource
|
||||
|
||||
type ResolvedOrganizerFolder {
|
||||
id: String!
|
||||
type: String!
|
||||
name: String!
|
||||
children: [ResolvedOrganizerEntry!]!
|
||||
}
|
||||
|
||||
type OrganizerContainerResource {
|
||||
id: String!
|
||||
type: String!
|
||||
name: String!
|
||||
meta: DockerContainer
|
||||
}
|
||||
|
||||
type OrganizerResource {
|
||||
id: String!
|
||||
type: String!
|
||||
name: String!
|
||||
meta: JSON
|
||||
}
|
||||
|
||||
type ResolvedOrganizerV1 {
|
||||
version: Float!
|
||||
views: [ResolvedOrganizerView!]!
|
||||
}
|
||||
|
||||
type FlashBackupStatus {
|
||||
@@ -1385,6 +1400,13 @@ type ApiConfig {
|
||||
plugins: [String!]!
|
||||
}
|
||||
|
||||
type SsoSettings implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""List of configured OIDC providers"""
|
||||
oidcProviders: [OidcProvider!]!
|
||||
}
|
||||
|
||||
type UnifiedSettings implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
@@ -1404,6 +1426,9 @@ type UpdateSettingsResponse {
|
||||
|
||||
"""The updated settings values"""
|
||||
values: JSON!
|
||||
|
||||
"""Warning messages about configuration issues found during validation"""
|
||||
warnings: [String!]
|
||||
}
|
||||
|
||||
type Settings implements Node {
|
||||
@@ -1412,10 +1437,245 @@ type Settings implements Node {
|
||||
"""A view of all settings"""
|
||||
unified: UnifiedSettings!
|
||||
|
||||
"""SSO settings"""
|
||||
sso: SsoSettings!
|
||||
|
||||
"""The API setting values"""
|
||||
api: ApiConfig!
|
||||
}
|
||||
|
||||
type OidcAuthorizationRule {
|
||||
"""The claim to check (e.g., email, sub, groups, hd)"""
|
||||
claim: String!
|
||||
|
||||
"""The comparison operator"""
|
||||
operator: AuthorizationOperator!
|
||||
|
||||
"""The value(s) to match against"""
|
||||
value: [String!]!
|
||||
}
|
||||
|
||||
"""Operators for authorization rule matching"""
|
||||
enum AuthorizationOperator {
|
||||
EQUALS
|
||||
CONTAINS
|
||||
ENDS_WITH
|
||||
STARTS_WITH
|
||||
}
|
||||
|
||||
type OidcProvider {
|
||||
"""The unique identifier for the OIDC provider"""
|
||||
id: PrefixedID!
|
||||
|
||||
"""Display name of the OIDC provider"""
|
||||
name: String!
|
||||
|
||||
"""OAuth2 client ID registered with the provider"""
|
||||
clientId: String!
|
||||
|
||||
"""OAuth2 client secret (if required by provider)"""
|
||||
clientSecret: String
|
||||
|
||||
"""
|
||||
OIDC issuer URL (e.g., https://accounts.google.com). Required for auto-discovery via /.well-known/openid-configuration
|
||||
"""
|
||||
issuer: String!
|
||||
|
||||
"""
|
||||
OAuth2 authorization endpoint URL. If omitted, will be auto-discovered from issuer/.well-known/openid-configuration
|
||||
"""
|
||||
authorizationEndpoint: String
|
||||
|
||||
"""
|
||||
OAuth2 token endpoint URL. If omitted, will be auto-discovered from issuer/.well-known/openid-configuration
|
||||
"""
|
||||
tokenEndpoint: String
|
||||
|
||||
"""
|
||||
JSON Web Key Set URI for token validation. If omitted, will be auto-discovered from issuer/.well-known/openid-configuration
|
||||
"""
|
||||
jwksUri: String
|
||||
|
||||
"""OAuth2 scopes to request (e.g., openid, profile, email)"""
|
||||
scopes: [String!]!
|
||||
|
||||
"""Flexible authorization rules based on claims"""
|
||||
authorizationRules: [OidcAuthorizationRule!]
|
||||
|
||||
"""
|
||||
Mode for evaluating authorization rules - OR (any rule passes) or AND (all rules must pass). Defaults to OR.
|
||||
"""
|
||||
authorizationRuleMode: AuthorizationRuleMode
|
||||
|
||||
"""Custom text for the login button"""
|
||||
buttonText: String
|
||||
|
||||
"""URL or base64 encoded icon for the login button"""
|
||||
buttonIcon: String
|
||||
|
||||
"""
|
||||
Button variant style from Reka UI. See https://reka-ui.com/docs/components/button
|
||||
"""
|
||||
buttonVariant: String
|
||||
|
||||
"""
|
||||
Custom CSS styles for the button (e.g., "background: linear-gradient(to right, #4f46e5, #7c3aed); border-radius: 9999px;")
|
||||
"""
|
||||
buttonStyle: String
|
||||
}
|
||||
|
||||
"""
|
||||
Mode for evaluating authorization rules - OR (any rule passes) or AND (all rules must pass)
|
||||
"""
|
||||
enum AuthorizationRuleMode {
|
||||
OR
|
||||
AND
|
||||
}
|
||||
|
||||
type OidcSessionValidation {
|
||||
valid: Boolean!
|
||||
username: String
|
||||
}
|
||||
|
||||
type PublicOidcProvider {
|
||||
id: ID!
|
||||
name: String!
|
||||
buttonText: String
|
||||
buttonIcon: String
|
||||
buttonVariant: String
|
||||
buttonStyle: String
|
||||
}
|
||||
|
||||
type UPSBattery {
|
||||
"""
|
||||
Battery charge level as a percentage (0-100). Unit: percent (%). Example: 100 means battery is fully charged
|
||||
"""
|
||||
chargeLevel: Int!
|
||||
|
||||
"""
|
||||
Estimated runtime remaining on battery power. Unit: seconds. Example: 3600 means 1 hour of runtime remaining
|
||||
"""
|
||||
estimatedRuntime: Int!
|
||||
|
||||
"""
|
||||
Battery health status. Possible values: 'Good', 'Replace', 'Unknown'. Indicates if the battery needs replacement
|
||||
"""
|
||||
health: String!
|
||||
}
|
||||
|
||||
type UPSPower {
|
||||
"""
|
||||
Input voltage from the wall outlet/mains power. Unit: volts (V). Example: 120.5 for typical US household voltage
|
||||
"""
|
||||
inputVoltage: Float!
|
||||
|
||||
"""
|
||||
Output voltage being delivered to connected devices. Unit: volts (V). Example: 120.5 - should match input voltage when on mains power
|
||||
"""
|
||||
outputVoltage: Float!
|
||||
|
||||
"""
|
||||
Current load on the UPS as a percentage of its capacity. Unit: percent (%). Example: 25 means UPS is loaded at 25% of its maximum capacity
|
||||
"""
|
||||
loadPercentage: Int!
|
||||
}
|
||||
|
||||
type UPSDevice {
|
||||
"""
|
||||
Unique identifier for the UPS device. Usually based on the model name or a generated ID
|
||||
"""
|
||||
id: ID!
|
||||
|
||||
"""Display name for the UPS device. Can be customized by the user"""
|
||||
name: String!
|
||||
|
||||
"""UPS model name/number. Example: 'APC Back-UPS Pro 1500'"""
|
||||
model: String!
|
||||
|
||||
"""
|
||||
Current operational status of the UPS. Common values: 'Online', 'On Battery', 'Low Battery', 'Replace Battery', 'Overload', 'Offline'. 'Online' means running on mains power, 'On Battery' means running on battery backup
|
||||
"""
|
||||
status: String!
|
||||
|
||||
"""Battery-related information"""
|
||||
battery: UPSBattery!
|
||||
|
||||
"""Power-related information"""
|
||||
power: UPSPower!
|
||||
}
|
||||
|
||||
type UPSConfiguration {
|
||||
"""
|
||||
UPS service state. Values: 'enable' or 'disable'. Controls whether the UPS monitoring service is running
|
||||
"""
|
||||
service: String
|
||||
|
||||
"""
|
||||
Type of cable connecting the UPS to the server. Common values: 'usb', 'smart', 'ether', 'custom'. Determines communication protocol
|
||||
"""
|
||||
upsCable: String
|
||||
|
||||
"""
|
||||
Custom cable configuration string. Only used when upsCable is set to 'custom'. Format depends on specific UPS model
|
||||
"""
|
||||
customUpsCable: String
|
||||
|
||||
"""
|
||||
UPS communication type. Common values: 'usb', 'net', 'snmp', 'dumb', 'pcnet', 'modbus'. Defines how the server communicates with the UPS
|
||||
"""
|
||||
upsType: String
|
||||
|
||||
"""
|
||||
Device path or network address for UPS connection. Examples: '/dev/ttyUSB0' for USB, '192.168.1.100:3551' for network. Depends on upsType setting
|
||||
"""
|
||||
device: String
|
||||
|
||||
"""
|
||||
Override UPS capacity for runtime calculations. Unit: volt-amperes (VA). Example: 1500 for a 1500VA UPS. Leave unset to use UPS-reported capacity
|
||||
"""
|
||||
overrideUpsCapacity: Int
|
||||
|
||||
"""
|
||||
Battery level threshold for shutdown. Unit: percent (%). Example: 10 means shutdown when battery reaches 10%. System will shutdown when battery drops to this level
|
||||
"""
|
||||
batteryLevel: Int
|
||||
|
||||
"""
|
||||
Runtime threshold for shutdown. Unit: minutes. Example: 5 means shutdown when 5 minutes runtime remaining. System will shutdown when estimated runtime drops below this
|
||||
"""
|
||||
minutes: Int
|
||||
|
||||
"""
|
||||
Timeout for UPS communications. Unit: seconds. Example: 0 means no timeout. Time to wait for UPS response before considering it offline
|
||||
"""
|
||||
timeout: Int
|
||||
|
||||
"""
|
||||
Kill UPS power after shutdown. Values: 'yes' or 'no'. If 'yes', tells UPS to cut power after system shutdown. Useful for ensuring complete power cycle
|
||||
"""
|
||||
killUps: String
|
||||
|
||||
"""
|
||||
Network Information Server (NIS) IP address. Default: '0.0.0.0' (listen on all interfaces). IP address for apcupsd network information server
|
||||
"""
|
||||
nisIp: String
|
||||
|
||||
"""
|
||||
Network server mode. Values: 'on' or 'off'. Enable to allow network clients to monitor this UPS
|
||||
"""
|
||||
netServer: String
|
||||
|
||||
"""
|
||||
UPS name for network monitoring. Used to identify this UPS on the network. Example: 'SERVER_UPS'
|
||||
"""
|
||||
upsName: String
|
||||
|
||||
"""
|
||||
Override UPS model name. Used for display purposes. Leave unset to use UPS-reported model
|
||||
"""
|
||||
modelName: String
|
||||
}
|
||||
|
||||
type VmDomain implements Node {
|
||||
"""The unique identifier for the vm (uuid)"""
|
||||
id: PrefixedID!
|
||||
@@ -1490,6 +1750,27 @@ type Plugin {
|
||||
hasCliModule: Boolean
|
||||
}
|
||||
|
||||
type AccessUrl {
|
||||
type: URL_TYPE!
|
||||
name: String
|
||||
ipv4: URL
|
||||
ipv6: URL
|
||||
}
|
||||
|
||||
enum URL_TYPE {
|
||||
LAN
|
||||
WIREGUARD
|
||||
WAN
|
||||
MDNS
|
||||
OTHER
|
||||
DEFAULT
|
||||
}
|
||||
|
||||
"""
|
||||
A field whose value conforms to the standard URL format as specified in RFC3986: https://www.ietf.org/rfc/rfc3986.txt.
|
||||
"""
|
||||
scalar URL
|
||||
|
||||
type AccessUrlObject {
|
||||
ipv4: String
|
||||
ipv6: String
|
||||
@@ -1653,6 +1934,7 @@ type Query {
|
||||
services: [Service!]!
|
||||
shares: [Share!]!
|
||||
vars: Vars!
|
||||
isInitialSetup: Boolean!
|
||||
|
||||
"""Get information about all VMs on the system"""
|
||||
vms: Vms!
|
||||
@@ -1668,6 +1950,21 @@ type Query {
|
||||
settings: Settings!
|
||||
isSSOEnabled: Boolean!
|
||||
|
||||
"""Get public OIDC provider information for login buttons"""
|
||||
publicOidcProviders: [PublicOidcProvider!]!
|
||||
|
||||
"""Get all configured OIDC providers (admin only)"""
|
||||
oidcProviders: [OidcProvider!]!
|
||||
|
||||
"""Get a specific OIDC provider by ID"""
|
||||
oidcProvider(id: PrefixedID!): OidcProvider
|
||||
|
||||
"""Validate an OIDC session token (internal use for CLI validation)"""
|
||||
validateOidcSession(token: String!): OidcSessionValidation!
|
||||
upsDevices: [UPSDevice!]!
|
||||
upsDeviceById(id: String!): UPSDevice
|
||||
upsConfiguration: UPSConfiguration!
|
||||
|
||||
"""List all installed plugins with their metadata"""
|
||||
plugins: [Plugin!]!
|
||||
remoteAccess: RemoteAccess!
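As an illustration of the new `publicOidcProviders` query shown above, here is a sketch of fetching the public provider metadata used to render login buttons. The endpoint path and host are assumptions based on the docs elsewhere in this changeset:

```ts
// Fetch public OIDC provider metadata for login buttons (sketch).
async function fetchPublicOidcProviders(baseUrl: string) {
    const query = /* GraphQL */ `
        query {
            publicOidcProviders {
                id
                name
                buttonText
                buttonIcon
                buttonVariant
                buttonStyle
            }
        }
    `;
    const response = await fetch(`${baseUrl}/graphql`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ query }),
    });
    const { data } = await response.json();
    return data.publicOidcProviders;
}

// Example with a hypothetical host:
// fetchPublicOidcProviders('http://tower.local:3001').then(console.log);
```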
|
||||
@@ -1702,10 +1999,15 @@ type Mutation {
|
||||
parityCheck: ParityCheckMutations!
|
||||
apiKey: ApiKeyMutations!
|
||||
rclone: RCloneMutations!
|
||||
createDockerFolder(name: String!, parentId: String, childrenIds: [String!]): ResolvedOrganizerV1!
|
||||
setDockerFolderChildren(folderId: String, childrenIds: [String!]!): ResolvedOrganizerV1!
|
||||
deleteDockerEntries(entryIds: [String!]!): ResolvedOrganizerV1!
|
||||
moveDockerEntriesToFolder(sourceEntryIds: [String!]!, destinationFolderId: String!): ResolvedOrganizerV1!
|
||||
|
||||
"""Initiates a flash drive backup using a configured remote."""
|
||||
initiateFlashBackup(input: InitiateFlashBackupInput!): FlashBackupStatus!
|
||||
updateSettings(input: JSON!): UpdateSettingsResponse!
|
||||
configureUps(config: UPSConfigInput!): Boolean!
|
||||
|
||||
"""
|
||||
Add one or more plugins to the API. Returns false if restart was triggered automatically, true if manual restart is required.
|
||||
@@ -1747,6 +2049,82 @@ input InitiateFlashBackupInput {
|
||||
options: JSON
|
||||
}
|
||||
|
||||
input UPSConfigInput {
|
||||
"""Enable or disable the UPS monitoring service"""
|
||||
service: UPSServiceState
|
||||
|
||||
"""Type of cable connecting the UPS to the server"""
|
||||
upsCable: UPSCableType
|
||||
|
||||
"""
|
||||
Custom cable configuration (only used when upsCable is CUSTOM). Format depends on specific UPS model
|
||||
"""
|
||||
customUpsCable: String
|
||||
|
||||
"""UPS communication protocol"""
|
||||
upsType: UPSType
|
||||
|
||||
"""
|
||||
Device path or network address for UPS connection. Examples: '/dev/ttyUSB0' for USB, '192.168.1.100:3551' for network
|
||||
"""
|
||||
device: String
|
||||
|
||||
"""
|
||||
Override UPS capacity for runtime calculations. Unit: watts (W). Leave unset to use UPS-reported capacity
|
||||
"""
|
||||
overrideUpsCapacity: Int
|
||||
|
||||
"""
|
||||
Battery level percentage to initiate shutdown. Unit: percent (%) - Valid range: 0-100
|
||||
"""
|
||||
batteryLevel: Int
|
||||
|
||||
"""Runtime left in minutes to initiate shutdown. Unit: minutes"""
|
||||
minutes: Int
|
||||
|
||||
"""
|
||||
Time on battery before shutdown. Unit: seconds. Set to 0 to disable timeout-based shutdown
|
||||
"""
|
||||
timeout: Int
|
||||
|
||||
"""
|
||||
Turn off UPS power after system shutdown. Useful for ensuring complete power cycle
|
||||
"""
|
||||
killUps: UPSKillPower
|
||||
}
|
||||
|
||||
"""Service state for UPS daemon"""
|
||||
enum UPSServiceState {
|
||||
ENABLE
|
||||
DISABLE
|
||||
}
|
||||
|
||||
"""UPS cable connection types"""
|
||||
enum UPSCableType {
|
||||
USB
|
||||
SIMPLE
|
||||
SMART
|
||||
ETHER
|
||||
CUSTOM
|
||||
}
|
||||
|
||||
"""UPS communication protocols"""
|
||||
enum UPSType {
|
||||
USB
|
||||
APCSMART
|
||||
NET
|
||||
SNMP
|
||||
DUMB
|
||||
PCNET
|
||||
MODBUS
|
||||
}
|
||||
|
||||
"""Kill UPS power after shutdown option"""
|
||||
enum UPSKillPower {
|
||||
YES
|
||||
NO
|
||||
}
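To show how the `UPSConfigInput` fields and the enums above fit together, here is a sketch of a `configureUps` call. The values are illustrative, and the request can be sent with the same kind of fetch helper sketched for the provider query earlier in this section:

```ts
const CONFIGURE_UPS_MUTATION = /* GraphQL */ `
    mutation ConfigureUps($config: UPSConfigInput!) {
        configureUps(config: $config)
    }
`;

// Example variables built from the schema above; values are illustrative.
const variables = {
    config: {
        service: 'ENABLE',   // UPSServiceState
        upsCable: 'USB',     // UPSCableType
        upsType: 'USB',      // UPSType
        device: '/dev/ttyUSB0',
        batteryLevel: 10,    // shut down at 10% battery
        minutes: 5,          // or when 5 minutes of runtime remain
        timeout: 0,          // disable timeout-based shutdown
        killUps: 'NO',       // UPSKillPower
    },
};

// body: JSON.stringify({ query: CONFIGURE_UPS_MUTATION, variables })
```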
|
||||
|
||||
input PluginManagementInput {
|
||||
"""Array of plugin package names to add or remove"""
|
||||
names: [String!]!
|
||||
@@ -1829,10 +2207,10 @@ type Subscription {
|
||||
notificationAdded: Notification!
|
||||
notificationsOverview: NotificationOverview!
|
||||
ownerSubscription: Owner!
|
||||
registrationSubscription: Registration!
|
||||
serversSubscription: Server!
|
||||
parityHistorySubscription: ParityCheck!
|
||||
arraySubscription: UnraidArray!
|
||||
upsUpdates: UPSDevice!
|
||||
}
|
||||
|
||||
"""Available authentication action verbs"""
|
||||
|
||||
api/package.json (modified, 103 lines in diff)
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@unraid/api",
|
||||
"version": "4.9.2",
|
||||
"version": "4.13.1",
|
||||
"main": "src/cli/index.ts",
|
||||
"type": "module",
|
||||
"corepack": {
|
||||
@@ -10,14 +10,14 @@
|
||||
"author": "Lime Technology, Inc. <unraid.net>",
|
||||
"license": "GPL-2.0-or-later",
|
||||
"engines": {
|
||||
"pnpm": "10.12.4"
|
||||
"pnpm": "10.14.0"
|
||||
},
|
||||
"scripts": {
|
||||
"// Development": "",
|
||||
"start": "node dist/main.js",
|
||||
"dev": "vite",
|
||||
"dev": "clear && vite",
|
||||
"dev:debug": "NODE_OPTIONS='--inspect-brk=9229 --enable-source-maps' vite",
|
||||
"command": "pnpm run build && clear && ./dist/cli.js",
|
||||
"command": "COMMAND_TESTER=true pnpm run build > /dev/null 2>&1 && NODE_ENV=development ./dist/cli.js",
|
||||
"command:raw": "./dist/cli.js",
|
||||
"// Build and Deploy": "",
|
||||
"build": "vite build --mode=production",
|
||||
@@ -28,9 +28,8 @@
|
||||
"preunraid:deploy": "pnpm build",
|
||||
"unraid:deploy": "./scripts/deploy-dev.sh",
|
||||
"// GraphQL Codegen": "",
|
||||
"codegen": "MOTHERSHIP_GRAPHQL_LINK='https://staging.mothership.unraid.net/ws' graphql-codegen --config codegen.ts -r dotenv/config './.env.staging'",
|
||||
"codegen:watch": "DOTENV_CONFIG_PATH='./.env.staging' graphql-codegen --config codegen.ts --watch -r dotenv/config",
|
||||
"codegen:local": "NODE_TLS_REJECT_UNAUTHORIZED=0 MOTHERSHIP_GRAPHQL_LINK='https://mothership.localhost/ws' graphql-codegen --config codegen.ts --watch",
|
||||
"codegen": "graphql-codegen --config codegen.ts",
|
||||
"codegen:watch": "graphql-codegen --config codegen.ts --watch",
|
||||
"// Code Quality": "",
|
||||
"lint": "eslint --config .eslintrc.ts src/",
|
||||
"lint:fix": "eslint --fix --config .eslintrc.ts src/",
|
||||
@@ -52,26 +51,26 @@
|
||||
"unraid-api": "dist/cli.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@apollo/client": "3.13.8",
|
||||
"@apollo/client": "3.13.9",
|
||||
"@apollo/server": "4.12.2",
|
||||
"@as-integrations/fastify": "2.1.1",
|
||||
"@fastify/cookie": "11.0.2",
|
||||
"@fastify/helmet": "13.0.1",
|
||||
"@graphql-codegen/client-preset": "4.8.2",
|
||||
"@graphql-codegen/client-preset": "4.8.3",
|
||||
"@graphql-tools/load-files": "7.0.1",
|
||||
"@graphql-tools/merge": "9.0.24",
|
||||
"@graphql-tools/schema": "10.0.23",
|
||||
"@graphql-tools/utils": "10.8.6",
|
||||
"@graphql-tools/merge": "9.1.1",
|
||||
"@graphql-tools/schema": "10.0.25",
|
||||
"@graphql-tools/utils": "10.9.1",
|
||||
"@jsonforms/core": "3.6.0",
|
||||
"@nestjs/apollo": "13.1.0",
|
||||
"@nestjs/cache-manager": "3.0.1",
|
||||
"@nestjs/common": "11.1.3",
|
||||
"@nestjs/common": "11.1.6",
|
||||
"@nestjs/config": "4.0.2",
|
||||
"@nestjs/core": "11.1.3",
|
||||
"@nestjs/core": "11.1.6",
|
||||
"@nestjs/event-emitter": "3.0.1",
|
||||
"@nestjs/graphql": "13.1.0",
|
||||
"@nestjs/passport": "11.0.5",
|
||||
"@nestjs/platform-fastify": "11.1.3",
|
||||
"@nestjs/platform-fastify": "11.1.6",
|
||||
"@nestjs/schedule": "6.0.0",
|
||||
"@nestjs/throttler": "6.4.0",
|
||||
"@reduxjs/toolkit": "2.8.2",
|
||||
@@ -80,9 +79,10 @@
|
||||
"@unraid/libvirt": "2.1.0",
|
||||
"@unraid/shared": "workspace:*",
|
||||
"accesscontrol": "2.2.1",
|
||||
"atomically": "2.0.3",
|
||||
"bycontract": "2.0.11",
|
||||
"bytes": "3.1.2",
|
||||
"cache-manager": "7.0.0",
|
||||
"cache-manager": "7.1.1",
|
||||
"cacheable-lookup": "7.0.0",
|
||||
"camelcase-keys": "9.1.3",
|
||||
"casbin": "5.38.0",
|
||||
@@ -94,16 +94,16 @@
|
||||
"command-exists": "1.2.9",
|
||||
"convert": "5.12.0",
|
||||
"cookie": "1.0.2",
|
||||
"cron": "4.3.1",
|
||||
"cron": "4.3.3",
|
||||
"cross-fetch": "4.1.0",
|
||||
"diff": "8.0.2",
|
||||
"dockerode": "4.0.7",
|
||||
"dotenv": "17.1.0",
|
||||
"dotenv": "17.2.1",
|
||||
"execa": "9.6.0",
|
||||
"exit-hook": "4.0.0",
|
||||
"fastify": "5.4.0",
|
||||
"fastify": "5.5.0",
|
||||
"filenamify": "6.0.0",
|
||||
"fs-extra": "11.3.0",
|
||||
"fs-extra": "11.3.1",
|
||||
"glob": "11.0.3",
|
||||
"global-agent": "3.0.0",
|
||||
"got": "14.4.7",
|
||||
@@ -112,37 +112,38 @@
|
||||
"graphql-scalars": "1.24.2",
|
||||
"graphql-subscriptions": "3.0.0",
|
||||
"graphql-tag": "2.12.6",
|
||||
"graphql-ws": "6.0.5",
|
||||
"graphql-ws": "6.0.6",
|
||||
"ini": "5.0.0",
|
||||
"ip": "2.0.1",
|
||||
"jose": "6.0.11",
|
||||
"jose": "6.0.12",
|
||||
"json-bigint-patch": "0.0.8",
|
||||
"lodash-es": "4.17.21",
|
||||
"multi-ini": "2.3.2",
|
||||
"mustache": "4.2.0",
|
||||
"nest-authz": "2.17.0",
|
||||
"nest-commander": "3.17.0",
|
||||
"nest-commander": "3.18.0",
|
||||
"nestjs-pino": "4.4.0",
|
||||
"node-cache": "5.1.2",
|
||||
"node-window-polyfill": "1.0.4",
|
||||
"openid-client": "6.6.2",
|
||||
"p-retry": "6.2.1",
|
||||
"passport-custom": "1.1.1",
|
||||
"passport-http-header-strategy": "1.1.0",
|
||||
"path-type": "6.0.0",
|
||||
"pino": "9.7.0",
|
||||
"pino": "9.8.0",
|
||||
"pino-http": "10.5.0",
|
||||
"pino-pretty": "13.0.0",
|
||||
"pino-pretty": "13.1.1",
|
||||
"pm2": "6.0.8",
|
||||
"reflect-metadata": "^0.1.14",
|
||||
"request": "2.88.2",
|
||||
"rxjs": "7.8.2",
|
||||
"semver": "7.7.2",
|
||||
"strftime": "0.10.3",
|
||||
"systeminformation": "5.27.6",
|
||||
"systeminformation": "5.27.7",
|
||||
"undici": "7.13.0",
|
||||
"uuid": "11.1.0",
|
||||
"ws": "8.18.2",
|
||||
"ws": "8.18.3",
|
||||
"zen-observable-ts": "1.1.0",
|
||||
"zod": "3.25.67"
|
||||
"zod": "3.25.76"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"unraid-api-plugin-connect": "workspace:*"
|
||||
@@ -153,71 +154,69 @@
|
||||
}
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "9.29.0",
|
||||
"@eslint/js": "9.33.0",
|
||||
"@graphql-codegen/add": "5.0.3",
|
||||
"@graphql-codegen/cli": "5.0.7",
|
||||
"@graphql-codegen/fragment-matcher": "5.1.0",
|
||||
"@graphql-codegen/import-types-preset": "3.0.1",
|
||||
"@graphql-codegen/typed-document-node": "5.1.1",
|
||||
"@graphql-codegen/typed-document-node": "5.1.2",
|
||||
"@graphql-codegen/typescript": "4.1.6",
|
||||
"@graphql-codegen/typescript-operations": "4.6.1",
|
||||
"@graphql-codegen/typescript-resolvers": "4.5.1",
|
||||
"@graphql-typed-document-node/core": "3.2.0",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.4.2",
|
||||
"@nestjs/testing": "11.1.3",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.6.1",
|
||||
"@nestjs/testing": "11.1.6",
|
||||
"@originjs/vite-plugin-commonjs": "1.0.3",
|
||||
"@rollup/plugin-node-resolve": "16.0.1",
|
||||
"@swc/core": "1.12.4",
|
||||
"@swc/core": "1.13.3",
|
||||
"@types/async-exit-hook": "2.0.2",
|
||||
"@types/bytes": "3.1.5",
|
||||
"@types/cli-table": "0.3.4",
|
||||
"@types/command-exists": "1.2.3",
|
||||
"@types/cors": "2.8.19",
|
||||
"@types/dockerode": "3.3.41",
|
||||
"@types/dockerode": "3.3.42",
|
||||
"@types/graphql-fields": "1.3.9",
|
||||
"@types/graphql-type-uuid": "0.2.6",
|
||||
"@types/ini": "4.1.1",
|
||||
"@types/ip": "1.1.3",
|
||||
"@types/lodash": "4.17.18",
|
||||
"@types/lodash": "4.17.20",
|
||||
"@types/lodash-es": "4.17.12",
|
||||
"@types/mustache": "4.2.6",
|
||||
"@types/node": "22.15.32",
|
||||
"@types/node": "22.17.1",
|
||||
"@types/pify": "6.1.0",
|
||||
"@types/semver": "7.7.0",
|
||||
"@types/sendmail": "1.4.7",
|
||||
"@types/stoppable": "1.1.3",
|
||||
"@types/strftime": "0.9.8",
|
||||
"@types/supertest": "6.0.3",
|
||||
"@types/uuid": "10.0.0",
|
||||
"@types/ws": "8.18.1",
|
||||
"@types/wtfnode": "0.7.3",
|
||||
"@vitest/coverage-v8": "3.2.4",
|
||||
"@vitest/ui": "3.2.4",
|
||||
"cz-conventional-changelog": "3.3.0",
|
||||
"eslint": "9.29.0",
|
||||
"eslint-plugin-import": "2.31.0",
|
||||
"eslint-plugin-n": "17.20.0",
|
||||
"eslint": "9.33.0",
|
||||
"eslint-plugin-import": "2.32.0",
|
||||
"eslint-plugin-no-relative-import-paths": "1.6.1",
|
||||
"eslint-plugin-prettier": "5.5.0",
|
||||
"graphql-codegen-typescript-validation-schema": "0.17.1",
|
||||
"jiti": "2.4.2",
|
||||
"eslint-plugin-prettier": "5.5.4",
|
||||
"jiti": "2.5.1",
|
||||
"nodemon": "3.1.10",
|
||||
"prettier": "3.5.3",
|
||||
"prettier": "3.6.2",
|
||||
"rollup-plugin-node-externals": "8.0.1",
|
||||
"commit-and-tag-version": "9.5.0",
|
||||
"supertest": "7.1.4",
|
||||
"tsx": "4.20.3",
|
||||
"type-fest": "4.41.0",
|
||||
"typescript": "5.8.3",
|
||||
"typescript-eslint": "8.34.1",
|
||||
"typescript": "5.9.2",
|
||||
"typescript-eslint": "8.39.1",
|
||||
"unplugin-swc": "1.5.5",
|
||||
"vite": "7.0.3",
|
||||
"vite": "7.1.1",
|
||||
"vite-plugin-node": "7.0.0",
|
||||
"vite-tsconfig-paths": "5.1.4",
|
||||
"vitest": "3.2.4",
|
||||
"zx": "8.5.5"
|
||||
"zx": "8.8.0"
|
||||
},
|
||||
"overrides": {
|
||||
"eslint": {
|
||||
"jiti": "2.4.2"
|
||||
"jiti": "2.5.1"
|
||||
},
|
||||
"@as-integrations/fastify": {
|
||||
"fastify": "$fastify"
|
||||
@@ -228,5 +227,5 @@
|
||||
}
|
||||
},
|
||||
"private": true,
|
||||
"packageManager": "pnpm@10.12.4"
|
||||
"packageManager": "pnpm@10.14.0"
|
||||
}
|
||||
|
||||
@@ -1,45 +0,0 @@
|
||||
import { getAllowedOrigins } from '@app/common/allowed-origins.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { loadConfigFile } from '@app/store/modules/config.js';
|
||||
import { loadStateFiles } from '@app/store/modules/emhttp.js';
|
||||
|
||||
import 'reflect-metadata';
|
||||
|
||||
import { expect, test } from 'vitest';
|
||||
|
||||
test('Returns allowed origins', async () => {
|
||||
// Load state files into store
|
||||
await store.dispatch(loadStateFiles()).unwrap();
|
||||
await store.dispatch(loadConfigFile()).unwrap();
|
||||
|
||||
// Get allowed origins
|
||||
const allowedOrigins = getAllowedOrigins();
|
||||
|
||||
// Test that the result is an array
|
||||
expect(Array.isArray(allowedOrigins)).toBe(true);
|
||||
|
||||
// Test that it contains the expected socket paths
|
||||
expect(allowedOrigins).toContain('/var/run/unraid-notifications.sock');
|
||||
expect(allowedOrigins).toContain('/var/run/unraid-php.sock');
|
||||
expect(allowedOrigins).toContain('/var/run/unraid-cli.sock');
|
||||
|
||||
// Test that it contains the expected local URLs
|
||||
expect(allowedOrigins).toContain('http://localhost:8080');
|
||||
expect(allowedOrigins).toContain('https://localhost:4443');
|
||||
|
||||
// Test that it contains the expected connect URLs
|
||||
expect(allowedOrigins).toContain('https://connect.myunraid.net');
|
||||
expect(allowedOrigins).toContain('https://connect-staging.myunraid.net');
|
||||
expect(allowedOrigins).toContain('https://dev-my.myunraid.net:4000');
|
||||
|
||||
// Test that it contains the extra origins from config
|
||||
expect(allowedOrigins).toContain('https://google.com');
|
||||
expect(allowedOrigins).toContain('https://test.com');
|
||||
|
||||
// Test that it contains some of the remote URLs
|
||||
expect(allowedOrigins).toContain('https://tower.local:4443');
|
||||
expect(allowedOrigins).toContain('https://192.168.1.150:4443');
|
||||
|
||||
// Test that there are no duplicates
|
||||
expect(allowedOrigins.length).toBe(new Set(allowedOrigins).size);
|
||||
});
|
||||
@@ -1,137 +0,0 @@
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ApiConfigPersistence } from '@app/unraid-api/config/api-config.module.js';
|
||||
import { ConfigPersistenceHelper } from '@app/unraid-api/config/persistence.helper.js';
|
||||
|
||||
describe('ApiConfigPersistence', () => {
|
||||
let service: ApiConfigPersistence;
|
||||
let configService: ConfigService;
|
||||
let persistenceHelper: ConfigPersistenceHelper;
|
||||
|
||||
beforeEach(() => {
|
||||
configService = {
|
||||
get: vi.fn(),
|
||||
set: vi.fn(),
|
||||
} as any;
|
||||
|
||||
persistenceHelper = {} as ConfigPersistenceHelper;
|
||||
service = new ApiConfigPersistence(configService, persistenceHelper);
|
||||
});
|
||||
|
||||
describe('convertLegacyConfig', () => {
|
||||
it('should migrate sandbox from string "yes" to boolean true', () => {
|
||||
const legacyConfig = {
|
||||
local: { sandbox: 'yes' },
|
||||
api: { extraOrigins: '' },
|
||||
remote: { ssoSubIds: '' },
|
||||
};
|
||||
|
||||
const result = service.convertLegacyConfig(legacyConfig);
|
||||
|
||||
expect(result.sandbox).toBe(true);
|
||||
});
|
||||
|
||||
it('should migrate sandbox from string "no" to boolean false', () => {
|
||||
const legacyConfig = {
|
||||
local: { sandbox: 'no' },
|
||||
api: { extraOrigins: '' },
|
||||
remote: { ssoSubIds: '' },
|
||||
};
|
||||
|
||||
const result = service.convertLegacyConfig(legacyConfig);
|
||||
|
||||
expect(result.sandbox).toBe(false);
|
||||
});
|
||||
|
||||
it('should migrate extraOrigins from comma-separated string to array', () => {
|
||||
const legacyConfig = {
|
||||
local: { sandbox: 'no' },
|
||||
api: { extraOrigins: 'https://example.com,https://test.com' },
|
||||
remote: { ssoSubIds: '' },
|
||||
};
|
||||
|
||||
const result = service.convertLegacyConfig(legacyConfig);
|
||||
|
||||
expect(result.extraOrigins).toEqual(['https://example.com', 'https://test.com']);
|
||||
});
|
||||
|
||||
it('should filter out non-HTTP origins from extraOrigins', () => {
|
||||
const legacyConfig = {
|
||||
local: { sandbox: 'no' },
|
||||
api: {
|
||||
extraOrigins: 'https://example.com,invalid-origin,http://test.com,ftp://bad.com',
|
||||
},
|
||||
remote: { ssoSubIds: '' },
|
||||
};
|
||||
|
||||
const result = service.convertLegacyConfig(legacyConfig);
|
||||
|
||||
expect(result.extraOrigins).toEqual(['https://example.com', 'http://test.com']);
|
||||
});
|
||||
|
||||
it('should handle empty extraOrigins string', () => {
|
||||
const legacyConfig = {
|
||||
local: { sandbox: 'no' },
|
||||
api: { extraOrigins: '' },
|
||||
remote: { ssoSubIds: '' },
|
||||
};
|
||||
|
||||
const result = service.convertLegacyConfig(legacyConfig);
|
||||
|
||||
expect(result.extraOrigins).toEqual([]);
|
||||
});
|
||||
|
||||
it('should migrate ssoSubIds from comma-separated string to array', () => {
|
||||
const legacyConfig = {
|
||||
local: { sandbox: 'no' },
|
||||
api: { extraOrigins: '' },
|
||||
remote: { ssoSubIds: 'user1,user2,user3' },
|
||||
};
|
||||
|
||||
const result = service.convertLegacyConfig(legacyConfig);
|
||||
|
||||
expect(result.ssoSubIds).toEqual(['user1', 'user2', 'user3']);
|
||||
});
|
||||
|
||||
it('should handle empty ssoSubIds string', () => {
|
||||
const legacyConfig = {
|
||||
local: { sandbox: 'no' },
|
||||
api: { extraOrigins: '' },
|
||||
remote: { ssoSubIds: '' },
|
||||
};
|
||||
|
||||
const result = service.convertLegacyConfig(legacyConfig);
|
||||
|
||||
expect(result.ssoSubIds).toEqual([]);
|
||||
});
|
||||
|
||||
it('should handle undefined config sections', () => {
|
||||
const legacyConfig = {};
|
||||
|
||||
const result = service.convertLegacyConfig(legacyConfig);
|
||||
|
||||
expect(result.sandbox).toBe(false);
|
||||
expect(result.extraOrigins).toEqual([]);
|
||||
expect(result.ssoSubIds).toEqual([]);
|
||||
});
|
||||
|
||||
it('should handle complete migration with all fields', () => {
|
||||
const legacyConfig = {
|
||||
local: { sandbox: 'yes' },
|
||||
api: { extraOrigins: 'https://app1.example.com,https://app2.example.com' },
|
||||
remote: { ssoSubIds: 'sub1,sub2,sub3' },
|
||||
};
|
||||
|
||||
const result = service.convertLegacyConfig(legacyConfig);
|
||||
|
||||
expect(result.sandbox).toBe(true);
|
||||
expect(result.extraOrigins).toEqual([
|
||||
'https://app1.example.com',
|
||||
'https://app2.example.com',
|
||||
]);
|
||||
expect(result.ssoSubIds).toEqual(['sub1', 'sub2', 'sub3']);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,158 +0,0 @@
|
||||
import 'reflect-metadata';
|
||||
|
||||
import { cloneDeep } from 'lodash-es';
|
||||
import { expect, test } from 'vitest';
|
||||
|
||||
import { getWriteableConfig } from '@app/core/utils/files/config-file-normalizer.js';
|
||||
import { initialState } from '@app/store/modules/config.js';
|
||||
|
||||
test('it creates a FLASH config with NO OPTIONAL values', () => {
|
||||
const basicConfig = initialState;
|
||||
const config = getWriteableConfig(basicConfig, 'flash');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
{
|
||||
"api": {
|
||||
"extraOrigins": "",
|
||||
"version": "",
|
||||
},
|
||||
"local": {
|
||||
"sandbox": "no",
|
||||
},
|
||||
"remote": {
|
||||
"accesstoken": "",
|
||||
"apikey": "",
|
||||
"avatar": "",
|
||||
"dynamicRemoteAccessType": "DISABLED",
|
||||
"email": "",
|
||||
"idtoken": "",
|
||||
"localApiKey": "",
|
||||
"refreshtoken": "",
|
||||
"regWizTime": "",
|
||||
"ssoSubIds": "",
|
||||
"upnpEnabled": "",
|
||||
"username": "",
|
||||
"wanaccess": "",
|
||||
"wanport": "",
|
||||
},
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('it creates a MEMORY config with NO OPTIONAL values', () => {
|
||||
const basicConfig = initialState;
|
||||
const config = getWriteableConfig(basicConfig, 'memory');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
{
|
||||
"api": {
|
||||
"extraOrigins": "",
|
||||
"version": "",
|
||||
},
|
||||
"connectionStatus": {
|
||||
"minigraph": "PRE_INIT",
|
||||
"upnpStatus": "",
|
||||
},
|
||||
"local": {
|
||||
"sandbox": "no",
|
||||
},
|
||||
"remote": {
|
||||
"accesstoken": "",
|
||||
"allowedOrigins": "/var/run/unraid-notifications.sock, /var/run/unraid-php.sock, /var/run/unraid-cli.sock, https://connect.myunraid.net, https://connect-staging.myunraid.net, https://dev-my.myunraid.net:4000",
|
||||
"apikey": "",
|
||||
"avatar": "",
|
||||
"dynamicRemoteAccessType": "DISABLED",
|
||||
"email": "",
|
||||
"idtoken": "",
|
||||
"localApiKey": "",
|
||||
"refreshtoken": "",
|
||||
"regWizTime": "",
|
||||
"ssoSubIds": "",
|
||||
"upnpEnabled": "",
|
||||
"username": "",
|
||||
"wanaccess": "",
|
||||
"wanport": "",
|
||||
},
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('it creates a FLASH config with OPTIONAL values', () => {
|
||||
const basicConfig = cloneDeep(initialState);
|
||||
// 2fa & t2fa should be ignored
|
||||
basicConfig.remote['2Fa'] = 'yes';
|
||||
basicConfig.local['2Fa'] = 'yes';
|
||||
|
||||
basicConfig.api.extraOrigins = 'myextra.origins';
|
||||
basicConfig.remote.upnpEnabled = 'yes';
|
||||
basicConfig.connectionStatus.upnpStatus = 'Turned On';
|
||||
const config = getWriteableConfig(basicConfig, 'flash');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
{
|
||||
"api": {
|
||||
"extraOrigins": "myextra.origins",
|
||||
"version": "",
|
||||
},
|
||||
"local": {
|
||||
"sandbox": "no",
|
||||
},
|
||||
"remote": {
|
||||
"accesstoken": "",
|
||||
"apikey": "",
|
||||
"avatar": "",
|
||||
"dynamicRemoteAccessType": "DISABLED",
|
||||
"email": "",
|
||||
"idtoken": "",
|
||||
"localApiKey": "",
|
||||
"refreshtoken": "",
|
||||
"regWizTime": "",
|
||||
"ssoSubIds": "",
|
||||
"upnpEnabled": "yes",
|
||||
"username": "",
|
||||
"wanaccess": "",
|
||||
"wanport": "",
|
||||
},
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('it creates a MEMORY config with OPTIONAL values', () => {
|
||||
const basicConfig = cloneDeep(initialState);
|
||||
// 2fa & t2fa should be ignored
|
||||
basicConfig.remote['2Fa'] = 'yes';
|
||||
basicConfig.local['2Fa'] = 'yes';
|
||||
basicConfig.api.extraOrigins = 'myextra.origins';
|
||||
basicConfig.remote.upnpEnabled = 'yes';
|
||||
basicConfig.connectionStatus.upnpStatus = 'Turned On';
|
||||
const config = getWriteableConfig(basicConfig, 'memory');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
{
|
||||
"api": {
|
||||
"extraOrigins": "myextra.origins",
|
||||
"version": "",
|
||||
},
|
||||
"connectionStatus": {
|
||||
"minigraph": "PRE_INIT",
|
||||
"upnpStatus": "Turned On",
|
||||
},
|
||||
"local": {
|
||||
"sandbox": "no",
|
||||
},
|
||||
"remote": {
|
||||
"accesstoken": "",
|
||||
"allowedOrigins": "/var/run/unraid-notifications.sock, /var/run/unraid-php.sock, /var/run/unraid-cli.sock, https://connect.myunraid.net, https://connect-staging.myunraid.net, https://dev-my.myunraid.net:4000",
|
||||
"apikey": "",
|
||||
"avatar": "",
|
||||
"dynamicRemoteAccessType": "DISABLED",
|
||||
"email": "",
|
||||
"idtoken": "",
|
||||
"localApiKey": "",
|
||||
"refreshtoken": "",
|
||||
"regWizTime": "",
|
||||
"ssoSubIds": "",
|
||||
"upnpEnabled": "yes",
|
||||
"username": "",
|
||||
"wanaccess": "",
|
||||
"wanport": "",
|
||||
},
|
||||
}
|
||||
`);
|
||||
});
|
||||
5 api/src/__test__/core/utils/pm2/dummy-process.js Normal file
@@ -0,0 +1,5 @@
/* eslint-disable no-undef */
// Dummy process for PM2 testing
setInterval(() => {
    // Keep process alive
}, 1000);
@@ -0,0 +1,216 @@
|
||||
import { existsSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
import { execa } from 'execa';
|
||||
import pm2 from 'pm2';
|
||||
import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { isUnraidApiRunning } from '@app/core/utils/pm2/unraid-api-running.js';
|
||||
|
||||
const __dirname = fileURLToPath(new URL('.', import.meta.url));
|
||||
const PROJECT_ROOT = join(__dirname, '../../../../..');
|
||||
const DUMMY_PROCESS_PATH = join(__dirname, 'dummy-process.js');
|
||||
const CLI_PATH = join(PROJECT_ROOT, 'dist/cli.js');
|
||||
const TEST_PROCESS_NAME = 'test-unraid-api';
|
||||
|
||||
// Shared PM2 connection state
|
||||
let pm2Connected = false;
|
||||
|
||||
// Helper function to run CLI command (assumes CLI is built)
|
||||
async function runCliCommand(command: string, options: any = {}) {
|
||||
return await execa('node', [CLI_PATH, command], options);
|
||||
}
|
||||
|
||||
// Helper to ensure PM2 connection is established
|
||||
async function ensurePM2Connection() {
|
||||
if (pm2Connected) return;
|
||||
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
pm2.connect((err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
pm2Connected = true;
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Helper to delete specific test processes (lightweight, reuses connection)
|
||||
async function deleteTestProcesses() {
|
||||
if (!pm2Connected) {
|
||||
// No connection, nothing to clean up
|
||||
return;
|
||||
}
|
||||
|
||||
const deletePromise = new Promise<void>((resolve) => {
|
||||
// Delete specific processes we might have created
|
||||
const processNames = ['unraid-api', TEST_PROCESS_NAME];
|
||||
let deletedCount = 0;
|
||||
|
||||
const deleteNext = () => {
|
||||
if (deletedCount >= processNames.length) {
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
|
||||
const processName = processNames[deletedCount];
|
||||
pm2.delete(processName, (deleteErr) => {
|
||||
// Ignore errors, process might not exist
|
||||
deletedCount++;
|
||||
deleteNext();
|
||||
});
|
||||
};
|
||||
|
||||
deleteNext();
|
||||
});
|
||||
|
||||
const timeoutPromise = new Promise<void>((resolve) => {
|
||||
setTimeout(() => resolve(), 3000); // 3 second timeout
|
||||
});
|
||||
|
||||
return Promise.race([deletePromise, timeoutPromise]);
|
||||
}
|
||||
|
||||
// Helper to ensure PM2 is completely clean (heavy cleanup with daemon kill)
|
||||
async function cleanupAllPM2Processes() {
|
||||
// First delete test processes if we have a connection
|
||||
if (pm2Connected) {
|
||||
await deleteTestProcesses();
|
||||
}
|
||||
|
||||
return new Promise<void>((resolve) => {
|
||||
// Always connect fresh for daemon kill (in case we weren't connected)
|
||||
pm2.connect((err) => {
|
||||
if (err) {
|
||||
// If we can't connect, assume PM2 is not running
|
||||
pm2Connected = false;
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
|
||||
// Kill the daemon to ensure fresh state
|
||||
pm2.killDaemon((killErr) => {
|
||||
pm2.disconnect();
|
||||
pm2Connected = false;
|
||||
// Small delay to let PM2 fully shutdown
|
||||
setTimeout(resolve, 500);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
describe.skipIf(!!process.env.CI)('PM2 integration tests', () => {
|
||||
beforeAll(async () => {
|
||||
// Build the CLI if it doesn't exist (only for CLI tests)
|
||||
if (!existsSync(CLI_PATH)) {
|
||||
console.log('Building CLI for integration tests...');
|
||||
try {
|
||||
await execa('pnpm', ['build'], {
|
||||
cwd: PROJECT_ROOT,
|
||||
stdio: 'inherit',
|
||||
timeout: 120000, // 2 minute timeout for build
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Failed to build CLI:', error);
|
||||
throw new Error(
|
||||
'Cannot run CLI integration tests without built CLI. Run `pnpm build` first.'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Only do a full cleanup once at the beginning
|
||||
await cleanupAllPM2Processes();
|
||||
}, 150000); // 2.5 minute timeout for setup
|
||||
|
||||
afterAll(async () => {
|
||||
// Only do a full cleanup once at the end
|
||||
await cleanupAllPM2Processes();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
// Lightweight cleanup after each test - just delete our test processes
|
||||
await deleteTestProcesses();
|
||||
}, 5000); // 5 second timeout for cleanup
|
||||
|
||||
describe('isUnraidApiRunning function', () => {
|
||||
it('should return false when PM2 is not running the unraid-api process', async () => {
|
||||
const result = await isUnraidApiRunning();
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true when PM2 has unraid-api process running', async () => {
|
||||
// Ensure PM2 connection
|
||||
await ensurePM2Connection();
|
||||
|
||||
// Start a dummy process with the name 'unraid-api'
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
pm2.start(
|
||||
{
|
||||
script: DUMMY_PROCESS_PATH,
|
||||
name: 'unraid-api',
|
||||
},
|
||||
(startErr) => {
|
||||
if (startErr) return reject(startErr);
|
||||
resolve();
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
// Give PM2 time to start the process
|
||||
await new Promise((resolve) => setTimeout(resolve, 2000));
|
||||
|
||||
const result = await isUnraidApiRunning();
|
||||
expect(result).toBe(true);
|
||||
}, 30000);
|
||||
|
||||
it('should return false when unraid-api process is stopped', async () => {
|
||||
// Ensure PM2 connection
|
||||
await ensurePM2Connection();
|
||||
|
||||
// Start and then stop the process
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
pm2.start(
|
||||
{
|
||||
script: DUMMY_PROCESS_PATH,
|
||||
name: 'unraid-api',
|
||||
},
|
||||
(startErr) => {
|
||||
if (startErr) return reject(startErr);
|
||||
|
||||
// Stop the process after starting
|
||||
setTimeout(() => {
|
||||
pm2.stop('unraid-api', (stopErr) => {
|
||||
if (stopErr) return reject(stopErr);
|
||||
resolve();
|
||||
});
|
||||
}, 1000);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 1000));
|
||||
|
||||
const result = await isUnraidApiRunning();
|
||||
expect(result).toBe(false);
|
||||
}, 30000);
|
||||
|
||||
it('should handle PM2 connection errors gracefully', async () => {
|
||||
// Set an invalid PM2_HOME to force connection failure
|
||||
const originalPM2Home = process.env.PM2_HOME;
|
||||
process.env.PM2_HOME = '/invalid/path/that/does/not/exist';
|
||||
|
||||
const result = await isUnraidApiRunning();
|
||||
expect(result).toBe(false);
|
||||
|
||||
// Restore original PM2_HOME
|
||||
if (originalPM2Home) {
|
||||
process.env.PM2_HOME = originalPM2Home;
|
||||
} else {
|
||||
delete process.env.PM2_HOME;
|
||||
}
|
||||
}, 15000); // 15 second timeout to allow for the Promise.race timeout
|
||||
});
|
||||
});
|
||||
@@ -34,6 +34,15 @@ vi.mock('@app/store/index.js', () => ({
|
||||
}),
|
||||
},
|
||||
}));
|
||||
vi.mock('@app/environment.js', () => ({
|
||||
ENVIRONMENT: 'development',
|
||||
environment: {
|
||||
IS_MAIN_PROCESS: true,
|
||||
},
|
||||
}));
|
||||
vi.mock('@app/core/utils/files/file-exists.js', () => ({
|
||||
fileExists: vi.fn().mockResolvedValue(true),
|
||||
}));
|
||||
|
||||
// Mock NestJS Logger to suppress logs during tests
|
||||
vi.mock('@nestjs/common', async (importOriginal) => {
|
||||
@@ -63,13 +72,22 @@ describe('RCloneApiService', () => {
|
||||
const { execa } = await import('execa');
|
||||
const pRetry = await import('p-retry');
|
||||
const { existsSync } = await import('node:fs');
|
||||
const { fileExists } = await import('@app/core/utils/files/file-exists.js');
|
||||
|
||||
mockGot = vi.mocked(got);
|
||||
mockExeca = vi.mocked(execa);
|
||||
mockPRetry = vi.mocked(pRetry.default);
|
||||
mockExistsSync = vi.mocked(existsSync);
|
||||
|
||||
mockGot.post = vi.fn().mockResolvedValue({ body: {} });
|
||||
// Mock successful RClone API response for socket check
|
||||
mockGot.post = vi.fn().mockResolvedValue({ body: { pid: 12345 } });
|
||||
|
||||
// Mock RClone binary exists check
|
||||
vi.mocked(fileExists).mockResolvedValue(true);
|
||||
|
||||
// Mock socket exists
|
||||
mockExistsSync.mockReturnValue(true);
|
||||
|
||||
mockExeca.mockReturnValue({
|
||||
on: vi.fn(),
|
||||
kill: vi.fn(),
|
||||
@@ -77,10 +95,12 @@ describe('RCloneApiService', () => {
|
||||
pid: 12345,
|
||||
} as any);
|
||||
mockPRetry.mockResolvedValue(undefined);
|
||||
mockExistsSync.mockReturnValue(false);
|
||||
|
||||
service = new RCloneApiService();
|
||||
await service.onModuleInit();
|
||||
|
||||
// Reset the mock after initialization to prepare for test-specific responses
|
||||
mockGot.post.mockClear();
|
||||
});
|
||||
|
||||
describe('getProviders', () => {
|
||||
@@ -102,6 +122,9 @@ describe('RCloneApiService', () => {
|
||||
json: {},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -129,6 +152,11 @@ describe('RCloneApiService', () => {
|
||||
'http://unix:/tmp/rclone.sock:/config/listremotes',
|
||||
expect.objectContaining({
|
||||
json: {},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -155,6 +183,11 @@ describe('RCloneApiService', () => {
|
||||
'http://unix:/tmp/rclone.sock:/config/get',
|
||||
expect.objectContaining({
|
||||
json: { name: 'test-remote' },
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -193,6 +226,11 @@ describe('RCloneApiService', () => {
|
||||
type: 's3',
|
||||
parameters: { access_key_id: 'AKIA...', secret_access_key: 'secret' },
|
||||
},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -217,6 +255,11 @@ describe('RCloneApiService', () => {
|
||||
name: 'existing-remote',
|
||||
access_key_id: 'NEW_AKIA...',
|
||||
},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -235,6 +278,11 @@ describe('RCloneApiService', () => {
|
||||
'http://unix:/tmp/rclone.sock:/config/delete',
|
||||
expect.objectContaining({
|
||||
json: { name: 'remote-to-delete' },
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -261,6 +309,11 @@ describe('RCloneApiService', () => {
|
||||
dstFs: 'remote:backup/path',
|
||||
delete_on: 'dst',
|
||||
},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -279,6 +332,11 @@ describe('RCloneApiService', () => {
|
||||
'http://unix:/tmp/rclone.sock:/job/status',
|
||||
expect.objectContaining({
|
||||
json: { jobid: 'job-123' },
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
@@ -299,6 +357,11 @@ describe('RCloneApiService', () => {
|
||||
'http://unix:/tmp/rclone.sock:/job/list',
|
||||
expect.objectContaining({
|
||||
json: {},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
headers: expect.objectContaining({
|
||||
Authorization: expect.stringMatching(/^Basic /),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
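Every assertion in the hunks above follows the same pattern: the service POSTs to an rclone remote-control endpoint over a unix socket with got, using enableUnixSockets and HTTP Basic auth. A standalone sketch of such a call is shown below; the socket path and credentials are placeholders, not the service's real configuration.

// Illustrative only: socket path and credentials are placeholders.
import got from 'got';

async function rcloneListRemotes(socketPath: string, user: string, pass: string) {
    const auth = Buffer.from(`${user}:${pass}`).toString('base64');
    const response = await got.post(`http://unix:${socketPath}:/config/listremotes`, {
        json: {},
        responseType: 'json',
        enableUnixSockets: true,
        headers: { Authorization: `Basic ${auth}` },
    });
    return response.body; // e.g. { remotes: ['my-remote'] }
}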
@@ -3,6 +3,7 @@ import '@app/__test__/setup/env-setup.js';
import '@app/__test__/setup/keyserver-mock.js';
import '@app/__test__/setup/config-setup.js';
import '@app/__test__/setup/store-reset.js';
import '@app/__test__/setup/api-json-backup.js';

// This file is automatically loaded by Vitest before running tests
// It imports all the setup files that need to be run before tests
36 api/src/__test__/setup/api-json-backup.ts Normal file
@@ -0,0 +1,36 @@
import { existsSync, readFileSync, writeFileSync } from 'fs';
import { join, resolve } from 'path';

import { afterAll, beforeAll } from 'vitest';

// Get the project root directory
const projectRoot = resolve(process.cwd());
const apiJsonPath = join(projectRoot, 'dev/configs/api.json');
const apiJsonBackupPath = join(projectRoot, 'dev/configs/api.json.backup');

let originalContent: string | null = null;

/**
 * Backs up api.json before tests run and restores it after tests complete.
 * This prevents tests from permanently modifying the development configuration.
 */
export function setupApiJsonBackup() {
    beforeAll(() => {
        // Save the original content if the file exists
        if (existsSync(apiJsonPath)) {
            originalContent = readFileSync(apiJsonPath, 'utf-8');
            // Create a backup file as well for safety
            writeFileSync(apiJsonBackupPath, originalContent, 'utf-8');
        }
    });

    afterAll(() => {
        // Restore the original content if we saved it
        if (originalContent !== null) {
            writeFileSync(apiJsonPath, originalContent, 'utf-8');
        }
    });
}

// Auto-run for all tests that import this module
setupApiJsonBackup();
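For these beforeAll/afterAll hooks to run, the module has to be loaded by Vitest before the suites; the hunk further up adds it to the aggregated setup file for exactly that reason. As a sketch, wiring a setup file directly would look roughly like this (the config path below is an assumption, not the repo's actual vitest configuration):

// vitest.config.ts -- minimal sketch, not the project's real configuration.
import { defineConfig } from 'vitest/config';

export default defineConfig({
    test: {
        // Modules listed here run before every test file, so the backup/restore
        // hooks registered by api-json-backup.ts apply to the whole suite.
        setupFiles: ['./src/__test__/setup/api-json-backup.ts'],
    },
});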
@@ -17,7 +17,6 @@ exports[`Returns paths 1`] = `
  "myservers-base",
  "myservers-config",
  "myservers-config-states",
  "myservers-env",
  "myservers-keepalive",
  "keyfile-base",
  "machine-id",
@@ -1,303 +0,0 @@
|
||||
import { beforeEach, describe, expect, test, vi } from 'vitest';
|
||||
|
||||
import { pubsub, PUBSUB_CHANNEL } from '@app/core/pubsub.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { MyServersConfigMemory } from '@app/types/my-servers-config.js';
|
||||
|
||||
describe.skip('config tests', () => {
|
||||
// Mock dependencies
|
||||
vi.mock('@app/core/pubsub.js', () => {
|
||||
const mockPublish = vi.fn();
|
||||
return {
|
||||
pubsub: {
|
||||
publish: mockPublish,
|
||||
},
|
||||
PUBSUB_CHANNEL: {
|
||||
OWNER: 'OWNER',
|
||||
SERVERS: 'SERVERS',
|
||||
},
|
||||
__esModule: true,
|
||||
default: {
|
||||
pubsub: {
|
||||
publish: mockPublish,
|
||||
},
|
||||
PUBSUB_CHANNEL: {
|
||||
OWNER: 'OWNER',
|
||||
SERVERS: 'SERVERS',
|
||||
},
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
// Get the mock function for pubsub.publish
|
||||
const mockPublish = vi.mocked(pubsub.publish);
|
||||
|
||||
// Clear mock before each test
|
||||
beforeEach(() => {
|
||||
mockPublish.mockClear();
|
||||
});
|
||||
|
||||
vi.mock('@app/mothership/graphql-client.js', () => ({
|
||||
GraphQLClient: {
|
||||
clearInstance: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('@app/mothership/jobs/ping-timeout-jobs.js', () => ({
|
||||
stopPingTimeoutJobs: vi.fn(),
|
||||
}));
|
||||
|
||||
const createConfigMatcher = (specificValues: Partial<MyServersConfigMemory> = {}) => {
|
||||
const defaultMatcher = {
|
||||
api: expect.objectContaining({
|
||||
extraOrigins: expect.any(String),
|
||||
version: expect.any(String),
|
||||
}),
|
||||
connectionStatus: expect.objectContaining({
|
||||
minigraph: expect.any(String),
|
||||
upnpStatus: expect.any(String),
|
||||
}),
|
||||
local: expect.objectContaining({
|
||||
sandbox: expect.any(String),
|
||||
}),
|
||||
nodeEnv: expect.any(String),
|
||||
remote: expect.objectContaining({
|
||||
accesstoken: expect.any(String),
|
||||
allowedOrigins: expect.any(String),
|
||||
apikey: expect.any(String),
|
||||
avatar: expect.any(String),
|
||||
dynamicRemoteAccessType: expect.any(String),
|
||||
email: expect.any(String),
|
||||
idtoken: expect.any(String),
|
||||
localApiKey: expect.any(String),
|
||||
refreshtoken: expect.any(String),
|
||||
regWizTime: expect.any(String),
|
||||
ssoSubIds: expect.any(String),
|
||||
upnpEnabled: expect.any(String),
|
||||
username: expect.any(String),
|
||||
wanaccess: expect.any(String),
|
||||
wanport: expect.any(String),
|
||||
}),
|
||||
status: expect.any(String),
|
||||
};
|
||||
|
||||
return expect.objectContaining({
|
||||
...defaultMatcher,
|
||||
...specificValues,
|
||||
});
|
||||
};
|
||||
|
||||
// test('Before init returns default values for all fields', async () => {
|
||||
// const state = store.getState().config;
|
||||
// expect(state).toMatchSnapshot();
|
||||
// }, 10_000);
|
||||
|
||||
test('After init returns values from cfg file for all fields', async () => {
|
||||
const { loadConfigFile } = await import('@app/store/modules/config.js');
|
||||
|
||||
// Load cfg into store
|
||||
await store.dispatch(loadConfigFile());
|
||||
|
||||
// Check if store has cfg contents loaded
|
||||
const state = store.getState().config;
|
||||
expect(state).toMatchObject(createConfigMatcher());
|
||||
});
|
||||
|
||||
test('updateUserConfig merges in changes to current state', async () => {
|
||||
const { loadConfigFile, updateUserConfig } = await import('@app/store/modules/config.js');
|
||||
|
||||
// Load cfg into store
|
||||
await store.dispatch(loadConfigFile());
|
||||
|
||||
// Update store
|
||||
store.dispatch(
|
||||
updateUserConfig({
|
||||
remote: { avatar: 'https://via.placeholder.com/200' },
|
||||
})
|
||||
);
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state).toMatchObject(
|
||||
createConfigMatcher({
|
||||
remote: expect.objectContaining({
|
||||
avatar: 'https://via.placeholder.com/200',
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
test('loginUser updates state and publishes to pubsub', async () => {
|
||||
const { loginUser } = await import('@app/store/modules/config.js');
|
||||
const userInfo = {
|
||||
email: 'test@example.com',
|
||||
avatar: 'https://via.placeholder.com/200',
|
||||
username: 'testuser',
|
||||
apikey: 'test-api-key',
|
||||
localApiKey: 'test-local-api-key',
|
||||
};
|
||||
|
||||
await store.dispatch(loginUser(userInfo));
|
||||
|
||||
expect(pubsub.publish).toHaveBeenCalledWith(PUBSUB_CHANNEL.OWNER, {
|
||||
owner: {
|
||||
username: userInfo.username,
|
||||
url: '',
|
||||
avatar: userInfo.avatar,
|
||||
},
|
||||
});
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state).toMatchObject(
|
||||
createConfigMatcher({
|
||||
remote: expect.objectContaining(userInfo),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
test('logoutUser clears state and publishes to pubsub', async () => {
|
||||
const { logoutUser } = await import('@app/store/modules/config.js');
|
||||
|
||||
await store.dispatch(logoutUser({ reason: 'test logout' }));
|
||||
|
||||
expect(pubsub.publish).toHaveBeenCalledWith(PUBSUB_CHANNEL.SERVERS, { servers: [] });
|
||||
expect(pubsub.publish).toHaveBeenCalledWith(PUBSUB_CHANNEL.OWNER, {
|
||||
owner: {
|
||||
username: 'root',
|
||||
url: '',
|
||||
avatar: '',
|
||||
},
|
||||
});
|
||||
// expect(stopPingTimeoutJobs).toHaveBeenCalled();
|
||||
// expect(GraphQLClient.clearInstance).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('updateAccessTokens updates token fields', async () => {
|
||||
const { updateAccessTokens } = await import('@app/store/modules/config.js');
|
||||
const tokens = {
|
||||
accesstoken: 'new-access-token',
|
||||
refreshtoken: 'new-refresh-token',
|
||||
idtoken: 'new-id-token',
|
||||
};
|
||||
|
||||
store.dispatch(updateAccessTokens(tokens));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state).toMatchObject(
|
||||
createConfigMatcher({
|
||||
remote: expect.objectContaining(tokens),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
test('updateAllowedOrigins updates extraOrigins', async () => {
|
||||
const { updateAllowedOrigins } = await import('@app/store/modules/config.js');
|
||||
const origins = ['https://test1.com', 'https://test2.com'];
|
||||
|
||||
store.dispatch(updateAllowedOrigins(origins));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.api.extraOrigins).toBe(origins.join(', '));
|
||||
});
|
||||
|
||||
test('setUpnpState updates upnp settings', async () => {
|
||||
const { setUpnpState } = await import('@app/store/modules/config.js');
|
||||
|
||||
store.dispatch(setUpnpState({ enabled: 'yes', status: 'active' }));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.remote.upnpEnabled).toBe('yes');
|
||||
expect(state.connectionStatus.upnpStatus).toBe('active');
|
||||
});
|
||||
|
||||
test('setWanPortToValue updates wanport', async () => {
|
||||
const { setWanPortToValue } = await import('@app/store/modules/config.js');
|
||||
|
||||
store.dispatch(setWanPortToValue(8443));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.remote.wanport).toBe('8443');
|
||||
});
|
||||
|
||||
test('setWanAccess updates wanaccess', async () => {
|
||||
const { setWanAccess } = await import('@app/store/modules/config.js');
|
||||
|
||||
store.dispatch(setWanAccess('yes'));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.remote.wanaccess).toBe('yes');
|
||||
});
|
||||
|
||||
// test('addSsoUser adds user to ssoSubIds', async () => {
|
||||
// const { addSsoUser } = await import('@app/store/modules/config.js');
|
||||
|
||||
// store.dispatch(addSsoUser('user1'));
|
||||
// store.dispatch(addSsoUser('user2'));
|
||||
|
||||
// const state = store.getState().config;
|
||||
// expect(state.remote.ssoSubIds).toBe('user1,user2');
|
||||
// });
|
||||
|
||||
// test('removeSsoUser removes user from ssoSubIds', async () => {
|
||||
// const { addSsoUser, removeSsoUser } = await import('@app/store/modules/config.js');
|
||||
|
||||
// store.dispatch(addSsoUser('user1'));
|
||||
// store.dispatch(addSsoUser('user2'));
|
||||
// store.dispatch(removeSsoUser('user1'));
|
||||
|
||||
// const state = store.getState().config;
|
||||
// expect(state.remote.ssoSubIds).toBe('user2');
|
||||
// });
|
||||
|
||||
// test('removeSsoUser with null clears all ssoSubIds', async () => {
|
||||
// const { addSsoUser, removeSsoUser } = await import('@app/store/modules/config.js');
|
||||
|
||||
// store.dispatch(addSsoUser('user1'));
|
||||
// store.dispatch(addSsoUser('user2'));
|
||||
// store.dispatch(removeSsoUser(null));
|
||||
|
||||
// const state = store.getState().config;
|
||||
// expect(state.remote.ssoSubIds).toBe('');
|
||||
// });
|
||||
|
||||
test('setLocalApiKey updates localApiKey', async () => {
|
||||
const { setLocalApiKey } = await import('@app/store/modules/config.js');
|
||||
|
||||
store.dispatch(setLocalApiKey('new-local-api-key'));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.remote.localApiKey).toBe('new-local-api-key');
|
||||
});
|
||||
|
||||
test('setLocalApiKey with null clears localApiKey', async () => {
|
||||
const { setLocalApiKey } = await import('@app/store/modules/config.js');
|
||||
|
||||
store.dispatch(setLocalApiKey(null));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.remote.localApiKey).toBe('');
|
||||
});
|
||||
|
||||
// test('setGraphqlConnectionStatus updates minigraph status', async () => {
|
||||
// store.dispatch(setGraphqlConnectionStatus({ status: MinigraphStatus.CONNECTED, error: null }));
|
||||
|
||||
// const state = store.getState().config;
|
||||
// expect(state.connectionStatus.minigraph).toBe(MinigraphStatus.CONNECTED);
|
||||
// });
|
||||
|
||||
// test('setupRemoteAccessThunk.fulfilled updates remote access settings', async () => {
|
||||
// const remoteAccessSettings = {
|
||||
// accessType: WAN_ACCESS_TYPE.DYNAMIC,
|
||||
// forwardType: WAN_FORWARD_TYPE.UPNP,
|
||||
// };
|
||||
|
||||
// await store.dispatch(setupRemoteAccessThunk(remoteAccessSettings));
|
||||
|
||||
// const state = store.getState().config;
|
||||
// expect(state.remote).toMatchObject({
|
||||
// wanaccess: 'no',
|
||||
// dynamicRemoteAccessType: 'UPNP',
|
||||
// wanport: '',
|
||||
// upnpEnabled: 'yes',
|
||||
// });
|
||||
// });
|
||||
});
|
||||
@@ -24,7 +24,7 @@ test('Before init returns default values for all fields', async () => {
    `);
});

test('After init returns values from cfg file for all fields', async () => {
test('After init returns values from cfg file for all fields', { timeout: 30000 }, async () => {
    const { loadStateFiles } = await import('@app/store/modules/emhttp.js');

    // Load state files into store

@@ -24,7 +24,6 @@ test('Returns paths', async () => {
    'myservers-base': '/boot/config/plugins/dynamix.my.servers/',
    'myservers-config': expect.stringContaining('api/dev/Unraid.net/myservers.cfg'),
    'myservers-config-states': expect.stringContaining('api/dev/states/myservers.cfg'),
    'myservers-env': '/boot/config/plugins/dynamix.my.servers/env',
    'myservers-keepalive': './dev/Unraid.net/fb_keepalive',
    'keyfile-base': expect.stringContaining('api/dev/Unraid.net'),
    'machine-id': expect.stringContaining('api/dev/data/machine-id'),
@@ -1,34 +0,0 @@
|
||||
import { expect, test, vi } from 'vitest';
|
||||
|
||||
import { store } from '@app/store/index.js';
|
||||
import { loadStateFiles } from '@app/store/modules/emhttp.js';
|
||||
import { loadRegistrationKey } from '@app/store/modules/registration.js';
|
||||
import { createRegistrationEvent } from '@app/store/sync/registration-sync.js';
|
||||
|
||||
vi.mock('@app/core/pubsub', () => ({
|
||||
pubsub: { publish: vi.fn() },
|
||||
}));
|
||||
|
||||
test('Creates a registration event', async () => {
|
||||
// Load state files into store
|
||||
|
||||
const config = await store.dispatch(loadStateFiles()).unwrap();
|
||||
await store.dispatch(loadRegistrationKey());
|
||||
expect(config.var.regFile).toBe('/app/dev/Unraid.net/Pro.key');
|
||||
|
||||
const state = store.getState();
|
||||
const registrationEvent = createRegistrationEvent(state);
|
||||
expect(registrationEvent).toMatchInlineSnapshot(`
|
||||
{
|
||||
"registration": {
|
||||
"guid": "13FE-4200-C300-58C372A52B19",
|
||||
"keyFile": {
|
||||
"contents": "hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w",
|
||||
"location": "/app/dev/Unraid.net/Pro.key",
|
||||
},
|
||||
"state": "PRO",
|
||||
"type": "PRO",
|
||||
},
|
||||
}
|
||||
`);
|
||||
});
|
||||
@@ -1,20 +0,0 @@
import { type Mapping } from '@runonflux/nat-upnp';
import { expect, test, vi } from 'vitest';

import { getWanPortForUpnp } from '@app/upnp/helpers.js';

test('it successfully gets a wan port given no exclusions', () => {
    const port = getWanPortForUpnp(null, 36_000, 38_000);
    expect(port).toBeGreaterThan(35_999);
    expect(port).toBeLessThan(38_001);
});

test('it fails to get a wan port given exclusions', () => {
    const port = getWanPortForUpnp([{ public: { port: 36_000 } }] as Mapping[], 36_000, 36_000);
    expect(port).toBeNull();
});

test('it succeeds in getting a wan port given exclusions', () => {
    const port = getWanPortForUpnp([{ public: { port: 36_000 } }] as Mapping[], 30_000, 36_000);
    expect(port).not.toBeNull();
});
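These (removed) tests only fix the contract of getWanPortForUpnp: return a port within [minPort, maxPort] that is not already used by an existing UPnP mapping, or null when every candidate is taken. One way to satisfy that contract, as an illustration rather than the actual helper:

// Inferred from the tests above; the real helper may choose ports differently (e.g. at random).
import { type Mapping } from '@runonflux/nat-upnp';

function pickWanPort(mappings: Mapping[] | null, minPort: number, maxPort: number): number | null {
    const taken = new Set((mappings ?? []).map((mapping) => mapping.public.port));
    for (let port = minPort; port <= maxPort; port++) {
        if (!taken.has(port)) return port;
    }
    return null; // every port in the range is already mapped
}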
@@ -1,29 +1,37 @@
|
||||
import '@app/dotenv.js';
|
||||
|
||||
import { execa } from 'execa';
|
||||
import { Logger } from '@nestjs/common';
|
||||
|
||||
import { CommandFactory } from 'nest-commander';
|
||||
|
||||
import { internalLogger, logger } from '@app/core/log.js';
|
||||
import { LOG_LEVEL } from '@app/environment.js';
|
||||
import { CliModule } from '@app/unraid-api/cli/cli.module.js';
|
||||
import { LOG_LEVEL, SUPPRESS_LOGS } from '@app/environment.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
|
||||
const getUnraidApiLocation = async () => {
|
||||
const { execa } = await import('execa');
|
||||
try {
|
||||
const shellToUse = await execa('which unraid-api');
|
||||
return shellToUse.stdout.trim();
|
||||
} catch (err) {
|
||||
logger.debug('Could not find unraid-api in PATH, using default location');
|
||||
|
||||
return '/usr/bin/unraid-api';
|
||||
}
|
||||
};
|
||||
|
||||
const getLogger = () => {
|
||||
if (LOG_LEVEL === 'TRACE' && !SUPPRESS_LOGS) {
|
||||
return new LogService();
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
const logger = getLogger();
|
||||
try {
|
||||
await import('json-bigint-patch');
|
||||
const { CliModule } = await import('@app/unraid-api/cli/cli.module.js');
|
||||
|
||||
await CommandFactory.run(CliModule, {
|
||||
cliName: 'unraid-api',
|
||||
logger: LOG_LEVEL === 'TRACE' ? new LogService() : false, // - enable this to see nest initialization issues
|
||||
logger: logger, // - enable this to see nest initialization issues
|
||||
completion: {
|
||||
fig: false,
|
||||
cmd: 'completion-script',
|
||||
@@ -32,10 +40,8 @@ try {
|
||||
});
|
||||
process.exit(0);
|
||||
} catch (error) {
|
||||
logger.error('ERROR:', error);
|
||||
internalLogger.error({
|
||||
message: 'Failed to start unraid-api',
|
||||
error,
|
||||
});
|
||||
if (logger) {
|
||||
logger.error('ERROR:', error);
|
||||
}
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
@@ -1,99 +0,0 @@
|
||||
import { uniq } from 'lodash-es';
|
||||
|
||||
import type { RootState } from '@app/store/index.js';
|
||||
import { logger } from '@app/core/log.js';
|
||||
import { GRAPHQL_INTROSPECTION } from '@app/environment.js';
|
||||
import { getServerIps, getUrlForField } from '@app/graphql/resolvers/subscription/network.js';
|
||||
import { getters, store } from '@app/store/index.js';
|
||||
import { FileLoadStatus } from '@app/store/types.js';
|
||||
|
||||
const getAllowedSocks = (): string[] => [
|
||||
// Notifier bridge
|
||||
'/var/run/unraid-notifications.sock',
|
||||
|
||||
// Unraid PHP scripts
|
||||
'/var/run/unraid-php.sock',
|
||||
|
||||
// CLI
|
||||
'/var/run/unraid-cli.sock',
|
||||
];
|
||||
|
||||
const getLocalAccessUrlsForServer = (state: RootState = store.getState()): string[] => {
|
||||
const { emhttp } = state;
|
||||
|
||||
if (emhttp.status !== FileLoadStatus.LOADED) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const { nginx } = emhttp;
|
||||
try {
|
||||
return [
|
||||
getUrlForField({
|
||||
url: 'localhost',
|
||||
port: nginx.httpPort,
|
||||
}).toString(),
|
||||
getUrlForField({
|
||||
url: 'localhost',
|
||||
portSsl: nginx.httpsPort,
|
||||
}).toString(),
|
||||
];
|
||||
} catch (error: unknown) {
|
||||
logger.debug('Caught error in getLocalAccessUrlsForServer: \n%o', error);
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
const getRemoteAccessUrlsForAllowedOrigins = (state: RootState = store.getState()): string[] => {
|
||||
const { urls } = getServerIps(state);
|
||||
|
||||
if (urls) {
|
||||
return urls.reduce<string[]>((acc, curr) => {
|
||||
if ((curr.ipv4 && curr.ipv6) || curr.ipv4) {
|
||||
acc.push(curr.ipv4.toString());
|
||||
} else if (curr.ipv6) {
|
||||
acc.push(curr.ipv6.toString());
|
||||
}
|
||||
|
||||
return acc;
|
||||
}, []);
|
||||
}
|
||||
|
||||
return [];
|
||||
};
|
||||
|
||||
export const getExtraOrigins = (): string[] => {
|
||||
const { extraOrigins } = getters.config().api;
|
||||
if (extraOrigins) {
|
||||
return extraOrigins
|
||||
.replaceAll(' ', '')
|
||||
.split(',')
|
||||
.filter((origin) => origin.startsWith('http://') || origin.startsWith('https://'));
|
||||
}
|
||||
|
||||
return [];
|
||||
};
|
||||
|
||||
const getConnectOrigins = (): string[] => {
|
||||
const connectMain = 'https://connect.myunraid.net';
|
||||
const connectStaging = 'https://connect-staging.myunraid.net';
|
||||
const connectDev = 'https://dev-my.myunraid.net:4000';
|
||||
|
||||
return [connectMain, connectStaging, connectDev];
|
||||
};
|
||||
|
||||
const getApolloSandbox = (): string[] => {
|
||||
if (GRAPHQL_INTROSPECTION) {
|
||||
return ['https://studio.apollographql.com'];
|
||||
}
|
||||
return [];
|
||||
};
|
||||
|
||||
export const getAllowedOrigins = (state: RootState = store.getState()): string[] =>
|
||||
uniq([
|
||||
...getAllowedSocks(),
|
||||
...getLocalAccessUrlsForServer(state),
|
||||
...getRemoteAccessUrlsForAllowedOrigins(state),
|
||||
...getExtraOrigins(),
|
||||
...getConnectOrigins(),
|
||||
...getApolloSandbox(),
|
||||
]).map((url) => (url.endsWith('/') ? url.slice(0, -1) : url));
|
||||
@@ -1,7 +1,7 @@
import { pino } from 'pino';
import pino from 'pino';
import pretty from 'pino-pretty';

import { API_VERSION, LOG_LEVEL, LOG_TYPE } from '@app/environment.js';
import { API_VERSION, LOG_LEVEL, LOG_TYPE, PATHS_LOGS_FILE, SUPPRESS_LOGS } from '@app/environment.js';

export const levels = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'] as const;

@@ -9,18 +9,30 @@ export type LogLevel = (typeof levels)[number];

const level = levels[levels.indexOf(LOG_LEVEL.toLowerCase() as LogLevel)] ?? 'info';

export const logDestination = pino.destination();
const nullDestination = pino.destination({
    write() {
        // Suppress all logs
    },
});

const stream =
    LOG_TYPE === 'pretty'
        ? pretty({
              singleLine: true,
              hideObject: false,
              colorize: true,
              ignore: 'hostname,pid',
              destination: logDestination,
          })
        : logDestination;
export const logDestination =
    process.env.SUPPRESS_LOGS === 'true' ? nullDestination : pino.destination();
const localFileDestination = pino.destination({
    dest: PATHS_LOGS_FILE,
    sync: true,
});

const stream = SUPPRESS_LOGS
    ? nullDestination
    : LOG_TYPE === 'pretty'
      ? pretty({
            singleLine: true,
            hideObject: false,
            colorize: true,
            ignore: 'hostname,pid',
            destination: logDestination,
        })
      : logDestination;

export const logger = pino(
    {
@@ -70,6 +82,7 @@ export const keyServerLogger = logger.child({ logger: 'key-server' });
export const remoteAccessLogger = logger.child({ logger: 'remote-access' });
export const remoteQueryLogger = logger.child({ logger: 'remote-query' });
export const apiLogger = logger.child({ logger: 'api' });
export const pluginLogger = logger.child({ logger: 'plugin', stream: localFileDestination });

export const loggers = [
    internalLogger,
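The diff above makes log suppression a property of the destination itself: when SUPPRESS_LOGS is set, everything funnels into a destination whose write is a no-op, and the pretty/raw choice only applies otherwise. A simplified, self-contained sketch of that switch (environment handling in the real module goes through @app/environment.js):

// Simplified illustration of the SUPPRESS_LOGS switch shown above.
import { pino } from 'pino';

const suppress = process.env.SUPPRESS_LOGS === 'true';

// pino accepts any object with a write(msg) method as a destination,
// so a no-op writer silently drops every log line.
const nullStream = { write(_msg: string) { /* drop */ } };

export const exampleLogger = suppress
    ? pino({ level: 'info' }, nullStream)
    : pino({ level: 'info' }); // default destination: stdout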
@@ -8,7 +8,7 @@ export class NginxManager {
            await execa('/etc/rc.d/rc.nginx', ['reload']);
            return true;
        } catch (err: unknown) {
            logger.warn('Failed to restart Nginx with error: ', err);
            logger.warn('Failed to restart Nginx with error: %o', err as object);
            return false;
        }
    };

@@ -8,7 +8,7 @@ export class UpdateDNSManager {
            await execa('/usr/bin/php', ['/usr/local/emhttp/plugins/dynamix/include/UpdateDNS.php']);
            return true;
        } catch (err: unknown) {
            logger.warn('Failed to call Update DNS with error: ', err);
            logger.warn('Failed to call Update DNS with error: %o', err as object);
            return false;
        }
    };
@@ -1,40 +0,0 @@
import { isEqual, merge } from 'lodash-es';

import { getAllowedOrigins } from '@app/common/allowed-origins.js';
import { initialState } from '@app/store/modules/config.js';
import {
    MyServersConfig,
    MyServersConfigMemory,
    MyServersConfigMemorySchema,
    MyServersConfigSchema,
} from '@app/types/my-servers-config.js';

// Define ConfigType and ConfigObject
export type ConfigType = 'flash' | 'memory';

/**
 * Get a writeable configuration based on the mode ('flash' or 'memory').
 */
export const getWriteableConfig = <T extends ConfigType>(
    config: T extends 'memory' ? MyServersConfigMemory : MyServersConfig,
    mode: T
): T extends 'memory' ? MyServersConfigMemory : MyServersConfig => {
    const schema = mode === 'memory' ? MyServersConfigMemorySchema : MyServersConfigSchema;

    const defaultConfig = schema.parse(initialState);
    // Use a type assertion for the mergedConfig to include `connectionStatus` only if `mode === 'memory'`
    const mergedConfig = merge<
        MyServersConfig,
        T extends 'memory' ? MyServersConfigMemory : MyServersConfig
    >(defaultConfig, config);

    if (mode === 'memory') {
        (mergedConfig as MyServersConfigMemory).remote.allowedOrigins = getAllowedOrigins().join(', ');
        (mergedConfig as MyServersConfigMemory).connectionStatus = {
            ...(defaultConfig as MyServersConfigMemory).connectionStatus,
            ...(config as MyServersConfigMemory).connectionStatus,
        };
    }

    return schema.parse(mergedConfig) as T extends 'memory' ? MyServersConfigMemory : MyServersConfig; // Narrowing ensures correct typing
};
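For reference, the removed helper was exercised in the deleted normalizer tests earlier in this diff by passing the store's initial config state and a mode string; a minimal usage sketch, shown only for orientation:

// Usage as seen in the removed tests earlier in this diff.
import { getWriteableConfig } from '@app/core/utils/files/config-file-normalizer.js';
import { initialState } from '@app/store/modules/config.js';

const flashConfig = getWriteableConfig(initialState, 'flash');   // values safe to persist to the flash drive
const memoryConfig = getWriteableConfig(initialState, 'memory'); // adds connectionStatus and allowedOrigins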
@@ -26,7 +26,7 @@ export const loadState = <T extends Record<string, unknown>>(filePath: string):
        logger.trace(
            'Failed loading state file "%s" with "%s"',
            filePath,
            error instanceof Error ? error.message : error
            error instanceof Error ? error.message : String(error)
        );
    }

@@ -1,25 +1,40 @@
export const isUnraidApiRunning = async (): Promise<boolean | undefined> => {
    const { connect, describe, disconnect } = await import('pm2');
    return new Promise((resolve, reject) => {
        connect(function (err) {
    const { PM2_HOME } = await import('@app/environment.js');

    // Set PM2_HOME if not already set
    if (!process.env.PM2_HOME) {
        process.env.PM2_HOME = PM2_HOME;
    }

    const pm2Module = await import('pm2');
    const pm2 = pm2Module.default || pm2Module;

    const pm2Promise = new Promise<boolean>((resolve) => {
        pm2.connect(function (err) {
            if (err) {
                console.error(err);
                reject('Could not connect to pm2');
                // Don't reject here, resolve with false since we can't connect to PM2
                resolve(false);
                return;
            }

            describe('unraid-api', function (err, processDescription) {
                console.log(err);
            // Now try to describe unraid-api specifically
            pm2.describe('unraid-api', function (err, processDescription) {
                if (err || processDescription.length === 0) {
                    console.log(false); // Service not found or error occurred
                    // Service not found or error occurred
                    resolve(false);
                } else {
                    const isOnline = processDescription?.[0]?.pm2_env?.status === 'online';
                    console.log(isOnline); // Output true if online, false otherwise
                    resolve(isOnline);
                }

                disconnect();
                pm2.disconnect();
            });
        });
    });

    const timeoutPromise = new Promise<boolean>((resolve) => {
        setTimeout(() => resolve(false), 10000); // 10 second timeout
    });

    return Promise.race([pm2Promise, timeoutPromise]);
};
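With the rewrite above, the helper resolves to false on connection errors, a missing process, or after a 10-second timeout instead of rejecting, so callers can await it directly. A usage sketch (the call site shown is illustrative, not a specific consumer in the codebase):

// Illustrative call site only.
import { isUnraidApiRunning } from '@app/core/utils/pm2/unraid-api-running.js';

const running = await isUnraidApiRunning();
if (!running) {
    console.log('unraid-api is not running under PM2');
}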
@@ -13,7 +13,7 @@ const isGuiMode = async (): Promise<boolean> => {
        // exitCode 0 means process was found, 1 means not found
        return exitCode === 0;
    } catch (error) {
        internalLogger.error('Error checking GUI mode: %s', error);
        internalLogger.error('Error checking GUI mode: %o', error as object);
        return false;
    }
};
437 api/src/core/utils/validation/validation-processor.test.ts Normal file
@@ -0,0 +1,437 @@
|
||||
import { describe, expect, it } from 'vitest';
|
||||
|
||||
import type { ValidationResult } from '@app/core/utils/validation/validation-processor.js';
|
||||
import {
|
||||
createValidationProcessor,
|
||||
ResultInterpreters,
|
||||
} from '@app/core/utils/validation/validation-processor.js';
|
||||
|
||||
describe('ValidationProcessor', () => {
|
||||
type TestInput = { value: number; text: string };
|
||||
|
||||
it('should process all validation steps when no errors occur', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'positiveValue',
|
||||
validator: (input: TestInput) => input.value > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'nonEmptyText',
|
||||
validator: (input: TestInput) => input.text.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: 5, text: 'hello' }, { failFast: false });
|
||||
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.errors).toEqual({});
|
||||
});
|
||||
|
||||
it('should collect all errors when failFast is disabled', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'positiveValue',
|
||||
validator: (input: TestInput) => input.value > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'nonEmptyText',
|
||||
validator: (input: TestInput) => input.text.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: -1, text: '' }, { failFast: false });
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.positiveValue).toBe(false);
|
||||
expect(result.errors.nonEmptyText).toBe(false);
|
||||
});
|
||||
|
||||
it('should stop at first error when failFast is enabled', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'positiveValue',
|
||||
validator: (input: TestInput) => input.value > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'nonEmptyText',
|
||||
validator: (input: TestInput) => input.text.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: -1, text: '' }, { failFast: true });
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.positiveValue).toBe(false);
|
||||
expect(result.errors.nonEmptyText).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should always fail fast on steps marked with alwaysFailFast', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'criticalCheck',
|
||||
validator: (input: TestInput) => input.value !== 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
alwaysFailFast: true,
|
||||
},
|
||||
{
|
||||
name: 'nonEmptyText',
|
||||
validator: (input: TestInput) => input.text.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: 0, text: '' }, { failFast: false });
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.criticalCheck).toBe(false);
|
||||
expect(result.errors.nonEmptyText).toBeUndefined(); // Should not be executed
|
||||
});
|
||||
|
||||
it('should work with different result interpreters', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'arrayResult',
|
||||
validator: (input: TestInput) => [1, 2, 3],
|
||||
isError: ResultInterpreters.errorList,
|
||||
},
|
||||
{
|
||||
name: 'nullableResult',
|
||||
validator: (input: TestInput) => (input.value > 0 ? null : 'error'),
|
||||
isError: ResultInterpreters.nullableIsSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: -1, text: 'test' }, { failFast: false });
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.arrayResult).toEqual([1, 2, 3]);
|
||||
expect(result.errors.nullableResult).toBe('error');
|
||||
});
|
||||
|
||||
it('should handle 0-arity validators', () => {
|
||||
const processor = createValidationProcessor({
|
||||
steps: [
|
||||
{
|
||||
name: 'zeroArityValidator',
|
||||
validator: () => true,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'zeroArityValidator2',
|
||||
validator: () => false,
|
||||
isError: ResultInterpreters.booleanMeansFailure,
|
||||
},
|
||||
] as const,
|
||||
});
|
||||
|
||||
const result = processor(null);
|
||||
expect(result.isValid).toBe(true);
|
||||
});
|
||||
|
||||
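The interpreters used in the preceding tests are characterized only by their behavior: booleanMeansSuccess flags false results as errors, booleanMeansFailure flags true results, errorList treats a non-empty array as the error payload, and nullableIsSuccess treats anything other than null/undefined (per the edge-case tests further down, 0 and false also count as errors). A sketch consistent with that behavior, purely as an illustration of the contract rather than the module's actual source:

// Inferred from the assertions in this file -- illustrative only, not the real ResultInterpreters.
const ExampleInterpreters = {
    booleanMeansSuccess: (result: boolean) => result === false, // a false result is recorded as the error
    booleanMeansFailure: (result: boolean) => result === true,  // a true result is recorded as the error
    errorList: (result: unknown[]) => result.length > 0,        // a non-empty array is the error payload
    nullableIsSuccess: (result: unknown) => result !== null && result !== undefined,
};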
it('should work with custom result interpreter', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'customCheck',
|
||||
validator: (input: TestInput) => ({ isOk: input.value > 0, code: 'VALUE_CHECK' }),
|
||||
isError: ResultInterpreters.custom((result: { isOk: boolean }) => !result.isOk),
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({ steps });
|
||||
|
||||
const validResult = processor({ value: 5, text: 'test' });
|
||||
expect(validResult.isValid).toBe(true);
|
||||
expect(validResult.errors).toEqual({});
|
||||
|
||||
const invalidResult = processor({ value: -1, text: 'test' });
|
||||
expect(invalidResult.isValid).toBe(false);
|
||||
expect(invalidResult.errors.customCheck).toEqual({ isOk: false, code: 'VALUE_CHECK' });
|
||||
});
|
||||
|
||||
it('should work with validationProcessor result interpreter', () => {
|
||||
const innerProcessor = createValidationProcessor({
|
||||
steps: [
|
||||
{
|
||||
name: 'innerCheck',
|
||||
validator: (val: number) => val > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const,
|
||||
});
|
||||
|
||||
const outerProcessor = createValidationProcessor({
|
||||
steps: [
|
||||
{
|
||||
name: 'nestedValidation',
|
||||
validator: (input: TestInput) => innerProcessor(input.value),
|
||||
isError: ResultInterpreters.validationProcessor,
|
||||
},
|
||||
] as const,
|
||||
});
|
||||
|
||||
const validResult = outerProcessor({ value: 5, text: 'test' });
|
||||
expect(validResult.isValid).toBe(true);
|
||||
|
||||
const invalidResult = outerProcessor({ value: -1, text: 'test' });
|
||||
expect(invalidResult.isValid).toBe(false);
|
||||
expect(invalidResult.errors.nestedValidation).toMatchObject({ isValid: false });
|
||||
});
|
||||
|
||||
it('should handle empty steps array', () => {
|
||||
const processor = createValidationProcessor<readonly []>({
|
||||
steps: [],
|
||||
});
|
||||
|
||||
const result = processor('any input' as never);
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.errors).toEqual({});
|
||||
});
|
||||
|
||||
it('should throw when validators throw errors', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'throwingValidator',
|
||||
validator: (input: TestInput) => {
|
||||
if (input.value === 0) {
|
||||
throw new Error('Division by zero');
|
||||
}
|
||||
return true;
|
||||
},
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({ steps });
|
||||
|
||||
expect(() => processor({ value: 0, text: 'test' })).toThrow('Division by zero');
|
||||
});
|
||||
|
||||
describe('complex validation scenarios', () => {
|
||||
it('should handle multi-type validation results', () => {
|
||||
type ComplexInput = {
|
||||
email: string;
|
||||
age: number;
|
||||
tags: string[];
|
||||
};
|
||||
|
||||
const steps = [
|
||||
{
|
||||
name: 'emailFormat',
|
||||
validator: (input: ComplexInput) =>
|
||||
/\S+@\S+\.\S+/.test(input.email) ? null : 'Invalid email format',
|
||||
isError: ResultInterpreters.nullableIsSuccess,
|
||||
},
|
||||
{
|
||||
name: 'ageRange',
|
||||
validator: (input: ComplexInput) => input.age >= 18 && input.age <= 120,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'tagValidation',
|
||||
validator: (input: ComplexInput) => {
|
||||
const invalidTags = input.tags.filter((tag) => tag.length < 2);
|
||||
return invalidTags;
|
||||
},
|
||||
isError: ResultInterpreters.errorList,
|
||||
},
|
||||
            ] as const;

            const processor = createValidationProcessor({ steps });

            const validInput: ComplexInput = {
                email: 'user@example.com',
                age: 25,
                tags: ['valid', 'tags', 'here'],
            };
            const validResult = processor(validInput);
            expect(validResult.isValid).toBe(true);

            const invalidInput: ComplexInput = {
                email: 'invalid-email',
                age: 150,
                tags: ['ok', 'a', 'b', 'valid'],
            };
            const invalidResult = processor(invalidInput, { failFast: false });
            expect(invalidResult.isValid).toBe(false);
            expect(invalidResult.errors.emailFormat).toBe('Invalid email format');
            expect(invalidResult.errors.ageRange).toBe(false);
            expect(invalidResult.errors.tagValidation).toEqual(['a', 'b']);
        });

        it('should preserve type safety with heterogeneous result types', () => {
            const steps = [
                {
                    name: 'stringResult',
                    validator: () => 'error message',
                    isError: (result: string) => result.length > 0,
                },
                {
                    name: 'numberResult',
                    validator: () => 42,
                    isError: (result: number) => result !== 0,
                },
                {
                    name: 'objectResult',
                    validator: () => ({ code: 'ERR_001', severity: 'high' }),
                    isError: (result: { code: string; severity: string }) => true,
                },
            ] as const;

            const processor = createValidationProcessor({ steps });
            const result = processor(null, { failFast: false });

            expect(result.isValid).toBe(false);
            expect(result.errors.stringResult).toBe('error message');
            expect(result.errors.numberResult).toBe(42);
            expect(result.errors.objectResult).toEqual({ code: 'ERR_001', severity: 'high' });
        });
    });

    describe('edge cases', () => {
        it('should handle undefined vs null in nullable interpreter', () => {
            const steps = [
                {
                    name: 'nullCheck',
                    validator: () => null,
                    isError: ResultInterpreters.nullableIsSuccess,
                },
                {
                    name: 'undefinedCheck',
                    validator: () => undefined,
                    isError: ResultInterpreters.nullableIsSuccess,
                },
                {
                    name: 'zeroCheck',
                    validator: () => 0,
                    isError: ResultInterpreters.nullableIsSuccess,
                },
                {
                    name: 'falseCheck',
                    validator: () => false,
                    isError: ResultInterpreters.nullableIsSuccess,
                },
            ] as const;

            const processor = createValidationProcessor({ steps });
            const result = processor(null, { failFast: false });

            expect(result.isValid).toBe(false);
            expect(result.errors.nullCheck).toBeUndefined();
            expect(result.errors.undefinedCheck).toBeUndefined();
            expect(result.errors.zeroCheck).toBe(0);
            expect(result.errors.falseCheck).toBe(false);
        });

        it('should handle very long validation chains', () => {
            // Test the real-world scenario of dynamically generated validation steps
            // Note: This demonstrates a limitation of the current type system -
            // dynamic step generation loses strict typing but still works at runtime
            type StepInput = { value: number };

            const steps = Array.from({ length: 50 }, (_, i) => ({
                name: `step${i}`,
                validator: (input: StepInput) => input.value > i,
                isError: ResultInterpreters.booleanMeansSuccess,
            }));

            // For dynamic steps, we need to use a type assertion since TypeScript
            // can't infer the literal string union from Array.from()
            const processor = createValidationProcessor({
                steps,
            });

            const result = processor({ value: 25 }, { failFast: false });
            expect(result.isValid).toBe(false);

            const errorCount = Object.keys(result.errors).length;
            expect(errorCount).toBe(25);
        });

        it('should handle validation by sum typing their inputs', () => {
            const processor = createValidationProcessor({
                steps: [
                    {
                        name: 'step1',
                        validator: ({ age }: { age: number }) => age > 18,
                        isError: ResultInterpreters.booleanMeansSuccess,
                    },
                    {
                        name: 'step2',
                        validator: ({ name }: { name: string }) => name.length > 0,
                        isError: ResultInterpreters.booleanMeansSuccess,
                    },
                ],
            });

            const result = processor({ age: 25, name: 'John' });
            expect(result.isValid).toBe(true);

            const result2 = processor({ age: 15, name: '' });
            expect(result2.isValid).toBe(false);
        });

        it('should allow wider types as processor inputs', () => {
            const sumProcessor = createValidationProcessor({
                steps: [
                    {
                        name: 'step1',
                        validator: ({ age }: { age: number }) => age > 18,
                        isError: ResultInterpreters.booleanMeansSuccess,
                    },
                    {
                        name: 'step2',
                        validator: ({ name }: { name: string }) => name.length > 0,
                        isError: ResultInterpreters.booleanMeansSuccess,
                    },
                ],
            });
            type Person = { age: number; name: string };
            const groupProcessor = createValidationProcessor({
                steps: [
                    {
                        name: 'step1',
                        validator: ({ age }: Person) => age > 18,
                        isError: ResultInterpreters.booleanMeansSuccess,
                    },
                    {
                        name: 'step2',
                        validator: ({ name }: Person) => name.length > 0,
                        isError: ResultInterpreters.booleanMeansSuccess,
                    },
                ],
            });

            const result = sumProcessor({ age: 25, name: 'John', favoriteColor: 'red' });
            expect(result.isValid).toBe(true);

            const result2 = groupProcessor({ name: '', favoriteColor: 'red', age: 15 });
            expect(result2.isValid).toBe(false);
        });
    });
});
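The 'should handle very long validation chains' test above points out that steps built with `Array.from()` lose the literal step-name union. A minimal sketch of one way to keep typed error keys in that situation, assuming the `createValidationProcessor` and `ResultInterpreters` exports from the new module listed below; the `StepName` alias, the import path, and the step count are hypothetical and not part of this diff:

```typescript
import { createValidationProcessor, ResultInterpreters } from './validation-processor.js';

// Hypothetical: assert the generated names to a template-literal union so that
// result.errors is keyed by `step${number}` rather than plain string.
type StepName = `step${number}`;

const dynamicSteps = Array.from({ length: 5 }, (_, i) => ({
    name: `step${i}` as StepName,
    validator: (input: { value: number }) => input.value > i,
    isError: ResultInterpreters.booleanMeansSuccess,
}));

const processor = createValidationProcessor({ steps: dynamicSteps });
const result = processor({ value: 2 }, { failFast: false });
// Steps 2 through 4 fail for value 2, so three `false` results are collected.
console.log(result.isValid, Object.keys(result.errors).length); // false 3
```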
230
api/src/core/utils/validation/validation-processor.ts
Normal file
@@ -0,0 +1,230 @@
/**
 * @fileoverview Type-safe sequential validation processor
 *
 * This module provides a flexible validation system that allows you to chain multiple
 * validation steps together in a type-safe manner. It supports both fail-fast and
 * continue-on-error modes, with comprehensive error collection and reporting.
 *
 * Key features:
 * - Type-safe validation pipeline creation
 * - Sequential validation step execution
 * - Configurable fail-fast behavior (global or per-step)
 * - Comprehensive error collection with typed results
 * - Helper functions for common validation result interpretations
 *
 * @example
 * ```typescript
 * const validator = createValidationProcessor({
 *   steps: [
 *     {
 *       name: 'required',
 *       validator: (input: string) => input.length > 0,
 *       isError: ResultInterpreters.booleanMeansSuccess
 *     },
 *     {
 *       name: 'email',
 *       validator: (input: string) => /\S+@\S+\.\S+/.test(input),
 *       isError: ResultInterpreters.booleanMeansSuccess
 *     }
 *   ]
 * });
 *
 * const result = validator('user@example.com');
 * if (!result.isValid) {
 *   console.log('Validation errors:', result.errors);
 * }
 * ```
 */

export type ValidationStepConfig<TInput, TResult, TName extends string = string> = {
    name: TName;
    validator: (input: TInput) => TResult;
    isError: (result: TResult) => boolean;
    alwaysFailFast?: boolean;
};

export interface ValidationPipelineConfig {
    failFast?: boolean;
}

export type ValidationPipelineDefinition<
    TInput,
    TSteps extends readonly ValidationStepConfig<TInput, any, string>[],
> = {
    steps: TSteps;
};

export type ExtractStepResults<TSteps extends readonly ValidationStepConfig<any, any, string>[]> = {
    [K in TSteps[number]['name']]: Extract<TSteps[number], { name: K }> extends ValidationStepConfig<
        any,
        infer R,
        K
    >
        ? R
        : never;
};

export type ValidationResult<TSteps extends readonly ValidationStepConfig<any, any, string>[]> = {
    isValid: boolean;
    errors: Partial<ExtractStepResults<TSteps>>;
};

// Util: convert a union to an intersection
type UnionToIntersection<U> = (U extends any ? (arg: U) => void : never) extends (arg: infer I) => void
    ? I
    : never;

// Extract the *intersection* of all input types required by the steps. This guarantees that
// the resulting processor knows about every property that any individual step relies on.
// We purposely compute an intersection (not a union) so that all required fields are present.
type ExtractInputType<TSteps extends readonly ValidationStepConfig<any, any, string>[]> =
    UnionToIntersection<
        TSteps[number] extends ValidationStepConfig<infer TInput, any, string> ? TInput : never
    >;

/**
 * Creates a type-safe validation processor that executes a series of validation steps
 * sequentially and collects errors from failed validations.
 *
 * This function returns a validation processor that can be called with input data
 * and an optional configuration object. The processor will run each validation step
 * in order, collecting any errors that occur.
 *
 * @template TSteps - A readonly array of validation step configurations that defines
 *                    the validation pipeline. The type is constrained to ensure type safety
 *                    across all steps and their results.
 *
 * @param definition - The validation pipeline definition
 * @param definition.steps - An array of validation step configurations. Each step must have:
 *   - `name`: A unique string identifier for the step
 *   - `validator`: A function that takes input and returns a validation result
 *   - `isError`: A function that determines if the validation result represents an error
 *   - `alwaysFailFast`: Optional flag to always stop execution on this step's failure
 *
 * @returns A validation processor function that accepts:
 *   - `input`: The data to validate (typed as the intersection of every step's input type)
 *   - `config`: Optional configuration object with:
 *     - `failFast`: If true, stops execution on first error (unless overridden by step config)
 *
 * @example Basic usage with string validation
 * ```typescript
 * const nameValidator = createValidationProcessor({
 *   steps: [
 *     {
 *       name: 'required',
 *       validator: (input: string) => input.trim().length > 0,
 *       isError: ResultInterpreters.booleanMeansSuccess
 *     },
 *     {
 *       name: 'minLength',
 *       validator: (input: string) => input.length >= 2,
 *       isError: ResultInterpreters.booleanMeansSuccess
 *     },
 *     {
 *       name: 'maxLength',
 *       validator: (input: string) => input.length <= 50,
 *       isError: ResultInterpreters.booleanMeansSuccess
 *     }
 *   ]
 * });
 *
 * const result = nameValidator('John');
 * // result.isValid: boolean
 * // result.errors: { required?: boolean, minLength?: boolean, maxLength?: boolean }
 * ```
 *
 * @example Complex validation with custom error types
 * ```typescript
 * type ValidationError = { message: string; code: string };
 *
 * const userValidator = createValidationProcessor({
 *   steps: [
 *     {
 *       name: 'email',
 *       validator: (user: { email: string }) =>
 *         /\S+@\S+\.\S+/.test(user.email)
 *           ? null
 *           : { message: 'Invalid email format', code: 'INVALID_EMAIL' },
 *       isError: (result): result is ValidationError => result !== null
 *     },
 *     {
 *       name: 'age',
 *       validator: (user: { age: number }) =>
 *         user.age >= 18
 *           ? null
 *           : { message: 'Must be 18 or older', code: 'UNDERAGE' },
 *       isError: (result): result is ValidationError => result !== null,
 *       alwaysFailFast: true // Stop immediately if age validation fails
 *     }
 *   ]
 * });
 * ```
 *
 * @example Using fail-fast mode
 * ```typescript
 * const result = validator(input, { failFast: true });
 * // Stops on first error, even if subsequent steps would also fail
 * ```
 *
 * @since 1.0.0
 */
export function createValidationProcessor<
    const TSteps extends readonly ValidationStepConfig<any, any, string>[],
>(definition: { steps: TSteps }) {
    // Determine the base input type required by all steps (intersection).
    type BaseInput = ExtractInputType<TSteps>;

    // Helper: widen input type for object literals while keeping regular objects assignable.
    type InputWithExtras = BaseInput extends object
        ? BaseInput | (BaseInput & Record<string, unknown>)
        : BaseInput;

    return function processValidation(
        input: InputWithExtras,
        config: ValidationPipelineConfig = {}
    ): ValidationResult<TSteps> {
        const errors: Partial<ExtractStepResults<TSteps>> = {};
        let hasErrors = false;

        for (const step of definition.steps) {
            const result = step.validator(input as BaseInput);
            const isError = step.isError(result);

            if (isError) {
                hasErrors = true;
                (errors as any)[step.name] = result;

                // Always fail fast for steps marked as such, or when global failFast is enabled
                if (step.alwaysFailFast || config.failFast) {
                    break;
                }
            }
        }

        return {
            isValid: !hasErrors,
            errors,
        };
    };
}

/** Helper functions for common result interpretations */
export const ResultInterpreters = {
    /** For boolean results: true = success, false = error */
    booleanMeansSuccess: (result: boolean): boolean => !result,

    /** For boolean results: false = success, true = error */
    booleanMeansFailure: (result: boolean): boolean => result,

    /** For nullable results: null/undefined = success, anything else = error */
    nullableIsSuccess: <T>(result: T | null | undefined): boolean => result != null,

    /** For array results: empty = success, non-empty = error */
    errorList: <T>(result: T[]): boolean => result.length > 0,

    /** For custom predicate */
    custom: <T>(predicate: (result: T) => boolean) => predicate,

    /** Interpreting the result of a validation processor */
    validationProcessor: (result: { isValid: boolean }) => !result.isValid,
} as const;
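A brief usage sketch of the module above, combining the `errorList` and `custom` helpers with a step-level `alwaysFailFast`; the `SignupForm` shape, messages, and values are illustrative only and do not come from the codebase:

```typescript
import { createValidationProcessor, ResultInterpreters } from './validation-processor.js';

// Illustrative input shape, not part of the diff.
type SignupForm = { email: string; tags: string[] };

const validateSignup = createValidationProcessor({
    steps: [
        {
            // Returns the offending tags; an empty array means success (errorList).
            name: 'tagErrors',
            validator: (form: SignupForm) => form.tags.filter((tag) => tag.length < 2),
            isError: ResultInterpreters.errorList,
        },
        {
            // Custom predicate: a non-empty string result counts as an error message.
            name: 'emailError',
            validator: (form: SignupForm) =>
                /\S+@\S+\.\S+/.test(form.email) ? '' : 'invalid email',
            isError: ResultInterpreters.custom((message: string) => message.length > 0),
            alwaysFailFast: true, // stop the pipeline here even without a global failFast
        },
    ],
});

const res = validateSignup({ email: 'nope', tags: ['a', 'ok'] });
// res.isValid === false; res.errors.emailError === 'invalid email';
// res.errors.tagErrors is ['a'] (typed as string[] via ExtractStepResults).
```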
@@ -67,6 +67,7 @@ export const getPackageJsonDependencies = (): string[] | undefined => {

export const API_VERSION = process.env.npm_package_version ?? getPackageJson().version;

/** Controls how the app is built/run (i.e. in terms of optimization) */
export const NODE_ENV =
    (process.env.NODE_ENV as 'development' | 'test' | 'staging' | 'production') ?? 'production';
export const environment = {
@@ -76,6 +77,7 @@ export const CHOKIDAR_USEPOLLING = process.env.CHOKIDAR_USEPOLLING === 'true';
export const IS_DOCKER = process.env.IS_DOCKER === 'true';
export const DEBUG = process.env.DEBUG === 'true';
export const INTROSPECTION = process.env.INTROSPECTION === 'true';
/** Determines the app-level & business logic environment (i.e. what data & infrastructure is used) */
export const ENVIRONMENT = process.env.ENVIRONMENT
    ? (process.env.ENVIRONMENT as 'production' | 'staging' | 'development')
    : 'production';
@@ -90,6 +92,7 @@ export const LOG_LEVEL = process.env.LOG_LEVEL
    : process.env.ENVIRONMENT === 'production'
      ? 'INFO'
      : 'DEBUG';
export const SUPPRESS_LOGS = process.env.SUPPRESS_LOGS === 'true';
export const MOTHERSHIP_GRAPHQL_LINK = process.env.MOTHERSHIP_GRAPHQL_LINK
    ? process.env.MOTHERSHIP_GRAPHQL_LINK
    : ENVIRONMENT === 'staging'
@@ -99,7 +102,9 @@ export const MOTHERSHIP_GRAPHQL_LINK = process.env.MOTHERSHIP_GRAPHQL_LINK
export const PM2_HOME = process.env.PM2_HOME ?? join(homedir(), '.pm2');
export const PM2_PATH = join(import.meta.dirname, '../../', 'node_modules', 'pm2', 'bin', 'pm2');
export const ECOSYSTEM_PATH = join(import.meta.dirname, '../../', 'ecosystem.config.json');
export const LOGS_DIR = process.env.LOGS_DIR ?? '/var/log/unraid-api';
export const PATHS_LOGS_DIR =
    process.env.PATHS_LOGS_DIR ?? process.env.LOGS_DIR ?? '/var/log/unraid-api';
export const PATHS_LOGS_FILE = process.env.PATHS_LOGS_FILE ?? '/var/log/graphql-api.log';

export const PATHS_CONFIG_MODULES =
    process.env.PATHS_CONFIG_MODULES ?? '/boot/config/plugins/dynamix.my.servers/configs';
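The added `PATHS_LOGS_DIR` constant above layers two fallbacks before the default path; a tiny sketch of how that chain resolves, with hypothetical environment values:

```typescript
// Hypothetical env values for illustration only.
const env: Record<string, string | undefined> = { LOGS_DIR: '/tmp/unraid-logs' };

// Same precedence as the constant above: PATHS_LOGS_DIR, then LOGS_DIR, then the default.
const pathsLogsDir = env.PATHS_LOGS_DIR ?? env.LOGS_DIR ?? '/var/log/unraid-api';
console.log(pathsLogsDir); // '/tmp/unraid-logs'
```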
@@ -1,77 +0,0 @@
import { ApolloClient, HttpLink, InMemoryCache, split } from '@apollo/client/core/index.js';
import { onError } from '@apollo/client/link/error/index.js';
import { GraphQLWsLink } from '@apollo/client/link/subscriptions/index.js';
import { getMainDefinition } from '@apollo/client/utilities/index.js';
import { fetch } from 'cross-fetch';
import { createClient } from 'graphql-ws';
import WebSocket from 'ws';

import { getInternalApiAddress } from '@app/consts.js';
import { graphqlLogger } from '@app/core/log.js';
import { getters } from '@app/store/index.js';

const getWebsocketWithHeaders = () => {
    return class WebsocketWithOriginHeader extends WebSocket {
        constructor(address, protocols) {
            super(address, protocols, {
                headers: {
                    Origin: '/var/run/unraid-cli.sock',
                    'Content-Type': 'application/json',
                },
            });
        }
    };
};

export const getApiApolloClient = ({ localApiKey }: { localApiKey: string }) => {
    const nginxPort = getters?.emhttp()?.nginx?.httpPort ?? 80;
    graphqlLogger.debug('Internal GraphQL URL: %s', getInternalApiAddress(true, nginxPort));
    const httpLink = new HttpLink({
        uri: getInternalApiAddress(true, nginxPort),
        fetch,
        headers: {
            Origin: '/var/run/unraid-cli.sock',
            'x-api-key': localApiKey,
            'Content-Type': 'application/json',
        },
    });

    // Create the subscription websocket link
    const wsLink = new GraphQLWsLink(
        createClient({
            webSocketImpl: getWebsocketWithHeaders(),
            url: getInternalApiAddress(false, nginxPort),
            connectionParams: () => {
                return { 'x-api-key': localApiKey };
            },
        })
    );

    const splitLink = split(
        ({ query }) => {
            const definition = getMainDefinition(query);
            return definition.kind === 'OperationDefinition' && definition.operation === 'subscription';
        },
        wsLink,
        httpLink
    );

    const errorLink = onError(({ networkError }) => {
        if (networkError) {
            graphqlLogger.warn('[GRAPHQL-CLIENT] NETWORK ERROR ENCOUNTERED %o', networkError);
        }
    });

    return new ApolloClient({
        defaultOptions: {
            query: {
                fetchPolicy: 'no-cache',
            },
            mutate: {
                fetchPolicy: 'no-cache',
            },
        },
        cache: new InMemoryCache(),
        link: errorLink.concat(splitLink),
    });
};
@@ -1,35 +0,0 @@
export const GET_CLOUD_OBJECT = /* GraphQL */ `
    query getCloud {
        cloud {
            error
            apiKey {
                valid
                error
            }
            minigraphql {
                status
                timeout
                error
            }
            cloud {
                status
                error
                ip
            }
            allowedOrigins
        }
    }
`;

export const GET_SERVERS = /* GraphQL */ `
    query getServers {
        servers {
            name
            guid
            status
            owner {
                username
            }
        }
    }
`;
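These removed query strings were consumed through the CLI Apollo client removed just above; a rough sketch of how that wiring looked, where the import paths and the API key value are illustrative and not shown in the diff:

```typescript
import { gql } from '@apollo/client/core/index.js';

// Illustrative paths: both modules are files removed in this diff.
import { getApiApolloClient } from '@app/graphql/client/api-client.js';
import { GET_CLOUD_OBJECT } from '@app/graphql/client/queries.js';

async function logCloudStatus() {
    // Placeholder key for illustration; a real local API key would come from config.
    const client = getApiApolloClient({ localApiKey: 'example-local-key' });
    const { data } = await client.query({ query: gql(GET_CLOUD_OBJECT) });
    console.log(data?.cloud?.minigraphql?.status, data?.cloud?.allowedOrigins);
}
```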
@@ -1,58 +0,0 @@
|
||||
/* eslint-disable */
|
||||
import * as types from './graphql.js';
|
||||
import type { TypedDocumentNode as DocumentNode } from '@graphql-typed-document-node/core';
|
||||
|
||||
/**
|
||||
* Map of all GraphQL operations in the project.
|
||||
*
|
||||
* This map has several performance disadvantages:
|
||||
* 1. It is not tree-shakeable, so it will include all operations in the project.
|
||||
* 2. It is not minifiable, so the string of a GraphQL query will be multiple times inside the bundle.
|
||||
* 3. It does not support dead code elimination, so it will add unused operations.
|
||||
*
|
||||
* Therefore it is highly recommended to use the babel or swc plugin for production.
|
||||
* Learn more about it here: https://the-guild.dev/graphql/codegen/plugins/presets/preset-client#reducing-bundle-size
|
||||
*/
|
||||
type Documents = {
|
||||
"\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n": typeof types.SendRemoteGraphQlResponseDocument,
|
||||
"\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n": typeof types.RemoteGraphQlEventFragmentFragmentDoc,
|
||||
"\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n": typeof types.EventsDocument,
|
||||
};
|
||||
const documents: Documents = {
|
||||
"\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n": types.SendRemoteGraphQlResponseDocument,
|
||||
"\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n": types.RemoteGraphQlEventFragmentFragmentDoc,
|
||||
"\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n": types.EventsDocument,
|
||||
};
|
||||
|
||||
/**
|
||||
* The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const query = graphql(`query GetUser($id: ID!) { user(id: $id) { name } }`);
|
||||
* ```
|
||||
*
|
||||
* The query argument is unknown!
|
||||
* Please regenerate the types.
|
||||
*/
|
||||
export function graphql(source: string): unknown;
|
||||
|
||||
/**
|
||||
* The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
export function graphql(source: "\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n"): (typeof documents)["\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n"];
|
||||
/**
|
||||
* The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
export function graphql(source: "\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n"): (typeof documents)["\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n"];
|
||||
/**
|
||||
* The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
export function graphql(source: "\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n"): (typeof documents)["\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n"];
|
||||
|
||||
export function graphql(source: string) {
|
||||
return (documents as any)[source] ?? {};
|
||||
}
|
||||
|
||||
export type DocumentType<TDocumentNode extends DocumentNode<any, any>> = TDocumentNode extends DocumentNode< infer TType, any> ? TType : never;
|
||||
@@ -1,748 +0,0 @@
|
||||
/* eslint-disable */
|
||||
import type { TypedDocumentNode as DocumentNode } from '@graphql-typed-document-node/core';
|
||||
export type Maybe<T> = T | null;
|
||||
export type InputMaybe<T> = Maybe<T>;
|
||||
export type Exact<T extends { [key: string]: unknown }> = { [K in keyof T]: T[K] };
|
||||
export type MakeOptional<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]?: Maybe<T[SubKey]> };
|
||||
export type MakeMaybe<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]: Maybe<T[SubKey]> };
|
||||
export type MakeEmpty<T extends { [key: string]: unknown }, K extends keyof T> = { [_ in K]?: never };
|
||||
export type Incremental<T> = T | { [P in keyof T]?: P extends ' $fragmentName' | '__typename' ? T[P] : never };
|
||||
/** All built-in and custom scalars, mapped to their actual values */
|
||||
export type Scalars = {
|
||||
ID: { input: string; output: string; }
|
||||
String: { input: string; output: string; }
|
||||
Boolean: { input: boolean; output: boolean; }
|
||||
Int: { input: number; output: number; }
|
||||
Float: { input: number; output: number; }
|
||||
/** A date-time string at UTC, such as 2007-12-03T10:15:30Z, compliant with the `date-time` format outlined in section 5.6 of the RFC 3339 profile of the ISO 8601 standard for representation of dates and times using the Gregorian calendar. */
|
||||
DateTime: { input: string; output: string; }
|
||||
/** A field whose value is a IPv4 address: https://en.wikipedia.org/wiki/IPv4. */
|
||||
IPv4: { input: any; output: any; }
|
||||
/** A field whose value is a IPv6 address: https://en.wikipedia.org/wiki/IPv6. */
|
||||
IPv6: { input: any; output: any; }
|
||||
/** The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
|
||||
JSON: { input: Record<string, any>; output: Record<string, any>; }
|
||||
/** The `Long` scalar type represents 52-bit integers */
|
||||
Long: { input: number; output: number; }
|
||||
/** A field whose value is a valid TCP port within the range of 0 to 65535: https://en.wikipedia.org/wiki/Transmission_Control_Protocol#TCP_ports */
|
||||
Port: { input: number; output: number; }
|
||||
/** A field whose value conforms to the standard URL format as specified in RFC3986: https://www.ietf.org/rfc/rfc3986.txt. */
|
||||
URL: { input: URL; output: URL; }
|
||||
};
|
||||
|
||||
export type AccessUrl = {
|
||||
__typename?: 'AccessUrl';
|
||||
ipv4?: Maybe<Scalars['URL']['output']>;
|
||||
ipv6?: Maybe<Scalars['URL']['output']>;
|
||||
name?: Maybe<Scalars['String']['output']>;
|
||||
type: UrlType;
|
||||
};
|
||||
|
||||
export type AccessUrlInput = {
|
||||
ipv4?: InputMaybe<Scalars['URL']['input']>;
|
||||
ipv6?: InputMaybe<Scalars['URL']['input']>;
|
||||
name?: InputMaybe<Scalars['String']['input']>;
|
||||
type: UrlType;
|
||||
};
|
||||
|
||||
export type ArrayCapacity = {
|
||||
__typename?: 'ArrayCapacity';
|
||||
bytes?: Maybe<ArrayCapacityBytes>;
|
||||
};
|
||||
|
||||
export type ArrayCapacityBytes = {
|
||||
__typename?: 'ArrayCapacityBytes';
|
||||
free?: Maybe<Scalars['Long']['output']>;
|
||||
total?: Maybe<Scalars['Long']['output']>;
|
||||
used?: Maybe<Scalars['Long']['output']>;
|
||||
};
|
||||
|
||||
export type ArrayCapacityBytesInput = {
|
||||
free?: InputMaybe<Scalars['Long']['input']>;
|
||||
total?: InputMaybe<Scalars['Long']['input']>;
|
||||
used?: InputMaybe<Scalars['Long']['input']>;
|
||||
};
|
||||
|
||||
export type ArrayCapacityInput = {
|
||||
bytes?: InputMaybe<ArrayCapacityBytesInput>;
|
||||
};
|
||||
|
||||
export type ClientConnectedEvent = {
|
||||
__typename?: 'ClientConnectedEvent';
|
||||
data: ClientConnectionEventData;
|
||||
type: EventType;
|
||||
};
|
||||
|
||||
export type ClientConnectionEventData = {
|
||||
__typename?: 'ClientConnectionEventData';
|
||||
apiKey: Scalars['String']['output'];
|
||||
type: ClientType;
|
||||
version: Scalars['String']['output'];
|
||||
};
|
||||
|
||||
export type ClientDisconnectedEvent = {
|
||||
__typename?: 'ClientDisconnectedEvent';
|
||||
data: ClientConnectionEventData;
|
||||
type: EventType;
|
||||
};
|
||||
|
||||
export type ClientPingEvent = {
|
||||
__typename?: 'ClientPingEvent';
|
||||
data: PingEventData;
|
||||
type: EventType;
|
||||
};
|
||||
|
||||
export enum ClientType {
|
||||
API = 'API',
|
||||
DASHBOARD = 'DASHBOARD'
|
||||
}
|
||||
|
||||
export type Config = {
|
||||
__typename?: 'Config';
|
||||
error?: Maybe<ConfigErrorState>;
|
||||
valid?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export enum ConfigErrorState {
|
||||
INVALID = 'INVALID',
|
||||
NO_KEY_SERVER = 'NO_KEY_SERVER',
|
||||
UNKNOWN_ERROR = 'UNKNOWN_ERROR',
|
||||
WITHDRAWN = 'WITHDRAWN'
|
||||
}
|
||||
|
||||
export type Dashboard = {
|
||||
__typename?: 'Dashboard';
|
||||
apps?: Maybe<DashboardApps>;
|
||||
array?: Maybe<DashboardArray>;
|
||||
config?: Maybe<DashboardConfig>;
|
||||
display?: Maybe<DashboardDisplay>;
|
||||
id: Scalars['ID']['output'];
|
||||
lastPublish?: Maybe<Scalars['DateTime']['output']>;
|
||||
network?: Maybe<Network>;
|
||||
online?: Maybe<Scalars['Boolean']['output']>;
|
||||
os?: Maybe<DashboardOs>;
|
||||
services?: Maybe<Array<Maybe<DashboardService>>>;
|
||||
twoFactor?: Maybe<DashboardTwoFactor>;
|
||||
vars?: Maybe<DashboardVars>;
|
||||
versions?: Maybe<DashboardVersions>;
|
||||
vms?: Maybe<DashboardVms>;
|
||||
};
|
||||
|
||||
export type DashboardApps = {
|
||||
__typename?: 'DashboardApps';
|
||||
installed?: Maybe<Scalars['Int']['output']>;
|
||||
started?: Maybe<Scalars['Int']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardAppsInput = {
|
||||
installed: Scalars['Int']['input'];
|
||||
started: Scalars['Int']['input'];
|
||||
};
|
||||
|
||||
export type DashboardArray = {
|
||||
__typename?: 'DashboardArray';
|
||||
/** Current array capacity */
|
||||
capacity?: Maybe<ArrayCapacity>;
|
||||
/** Current array state */
|
||||
state?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardArrayInput = {
|
||||
/** Current array capacity */
|
||||
capacity: ArrayCapacityInput;
|
||||
/** Current array state */
|
||||
state: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
export type DashboardCase = {
|
||||
__typename?: 'DashboardCase';
|
||||
base64?: Maybe<Scalars['String']['output']>;
|
||||
error?: Maybe<Scalars['String']['output']>;
|
||||
icon?: Maybe<Scalars['String']['output']>;
|
||||
url?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardCaseInput = {
|
||||
base64: Scalars['String']['input'];
|
||||
error?: InputMaybe<Scalars['String']['input']>;
|
||||
icon: Scalars['String']['input'];
|
||||
url: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
export type DashboardConfig = {
|
||||
__typename?: 'DashboardConfig';
|
||||
error?: Maybe<Scalars['String']['output']>;
|
||||
valid?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardConfigInput = {
|
||||
error?: InputMaybe<Scalars['String']['input']>;
|
||||
valid: Scalars['Boolean']['input'];
|
||||
};
|
||||
|
||||
export type DashboardDisplay = {
|
||||
__typename?: 'DashboardDisplay';
|
||||
case?: Maybe<DashboardCase>;
|
||||
};
|
||||
|
||||
export type DashboardDisplayInput = {
|
||||
case: DashboardCaseInput;
|
||||
};
|
||||
|
||||
export type DashboardInput = {
|
||||
apps: DashboardAppsInput;
|
||||
array: DashboardArrayInput;
|
||||
config: DashboardConfigInput;
|
||||
display: DashboardDisplayInput;
|
||||
os: DashboardOsInput;
|
||||
services: Array<DashboardServiceInput>;
|
||||
twoFactor?: InputMaybe<DashboardTwoFactorInput>;
|
||||
vars: DashboardVarsInput;
|
||||
versions: DashboardVersionsInput;
|
||||
vms: DashboardVmsInput;
|
||||
};
|
||||
|
||||
export type DashboardOs = {
|
||||
__typename?: 'DashboardOs';
|
||||
hostname?: Maybe<Scalars['String']['output']>;
|
||||
uptime?: Maybe<Scalars['DateTime']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardOsInput = {
|
||||
hostname: Scalars['String']['input'];
|
||||
uptime: Scalars['DateTime']['input'];
|
||||
};
|
||||
|
||||
export type DashboardService = {
|
||||
__typename?: 'DashboardService';
|
||||
name?: Maybe<Scalars['String']['output']>;
|
||||
online?: Maybe<Scalars['Boolean']['output']>;
|
||||
uptime?: Maybe<DashboardServiceUptime>;
|
||||
version?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardServiceInput = {
|
||||
name: Scalars['String']['input'];
|
||||
online: Scalars['Boolean']['input'];
|
||||
uptime?: InputMaybe<DashboardServiceUptimeInput>;
|
||||
version: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
export type DashboardServiceUptime = {
|
||||
__typename?: 'DashboardServiceUptime';
|
||||
timestamp?: Maybe<Scalars['DateTime']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardServiceUptimeInput = {
|
||||
timestamp: Scalars['DateTime']['input'];
|
||||
};
|
||||
|
||||
export type DashboardTwoFactor = {
|
||||
__typename?: 'DashboardTwoFactor';
|
||||
local?: Maybe<DashboardTwoFactorLocal>;
|
||||
remote?: Maybe<DashboardTwoFactorRemote>;
|
||||
};
|
||||
|
||||
export type DashboardTwoFactorInput = {
|
||||
local: DashboardTwoFactorLocalInput;
|
||||
remote: DashboardTwoFactorRemoteInput;
|
||||
};
|
||||
|
||||
export type DashboardTwoFactorLocal = {
|
||||
__typename?: 'DashboardTwoFactorLocal';
|
||||
enabled?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardTwoFactorLocalInput = {
|
||||
enabled: Scalars['Boolean']['input'];
|
||||
};
|
||||
|
||||
export type DashboardTwoFactorRemote = {
|
||||
__typename?: 'DashboardTwoFactorRemote';
|
||||
enabled?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardTwoFactorRemoteInput = {
|
||||
enabled: Scalars['Boolean']['input'];
|
||||
};
|
||||
|
||||
export type DashboardVars = {
|
||||
__typename?: 'DashboardVars';
|
||||
flashGuid?: Maybe<Scalars['String']['output']>;
|
||||
regState?: Maybe<Scalars['String']['output']>;
|
||||
regTy?: Maybe<Scalars['String']['output']>;
|
||||
serverDescription?: Maybe<Scalars['String']['output']>;
|
||||
serverName?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardVarsInput = {
|
||||
flashGuid: Scalars['String']['input'];
|
||||
regState: Scalars['String']['input'];
|
||||
regTy: Scalars['String']['input'];
|
||||
/** Server description */
|
||||
serverDescription?: InputMaybe<Scalars['String']['input']>;
|
||||
/** Name of the server */
|
||||
serverName?: InputMaybe<Scalars['String']['input']>;
|
||||
};
|
||||
|
||||
export type DashboardVersions = {
|
||||
__typename?: 'DashboardVersions';
|
||||
unraid?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardVersionsInput = {
|
||||
unraid: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
export type DashboardVms = {
|
||||
__typename?: 'DashboardVms';
|
||||
installed?: Maybe<Scalars['Int']['output']>;
|
||||
started?: Maybe<Scalars['Int']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardVmsInput = {
|
||||
installed: Scalars['Int']['input'];
|
||||
started: Scalars['Int']['input'];
|
||||
};
|
||||
|
||||
export type Event = ClientConnectedEvent | ClientDisconnectedEvent | ClientPingEvent | RemoteAccessEvent | RemoteGraphQlEvent | UpdateEvent;
|
||||
|
||||
export enum EventType {
|
||||
CLIENT_CONNECTED_EVENT = 'CLIENT_CONNECTED_EVENT',
|
||||
CLIENT_DISCONNECTED_EVENT = 'CLIENT_DISCONNECTED_EVENT',
|
||||
CLIENT_PING_EVENT = 'CLIENT_PING_EVENT',
|
||||
REMOTE_ACCESS_EVENT = 'REMOTE_ACCESS_EVENT',
|
||||
REMOTE_GRAPHQL_EVENT = 'REMOTE_GRAPHQL_EVENT',
|
||||
UPDATE_EVENT = 'UPDATE_EVENT'
|
||||
}
|
||||
|
||||
export type FullServerDetails = {
|
||||
__typename?: 'FullServerDetails';
|
||||
apiConnectedCount?: Maybe<Scalars['Int']['output']>;
|
||||
apiVersion?: Maybe<Scalars['String']['output']>;
|
||||
connectionTimestamp?: Maybe<Scalars['String']['output']>;
|
||||
dashboard?: Maybe<Dashboard>;
|
||||
lastPublish?: Maybe<Scalars['String']['output']>;
|
||||
network?: Maybe<Network>;
|
||||
online?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export enum Importance {
|
||||
ALERT = 'ALERT',
|
||||
INFO = 'INFO',
|
||||
WARNING = 'WARNING'
|
||||
}
|
||||
|
||||
export type KsServerDetails = {
|
||||
__typename?: 'KsServerDetails';
|
||||
accessLabel: Scalars['String']['output'];
|
||||
accessUrl: Scalars['String']['output'];
|
||||
apiKey?: Maybe<Scalars['String']['output']>;
|
||||
description: Scalars['String']['output'];
|
||||
dnsHash: Scalars['String']['output'];
|
||||
flashBackupDate?: Maybe<Scalars['Int']['output']>;
|
||||
flashBackupUrl: Scalars['String']['output'];
|
||||
flashProduct: Scalars['String']['output'];
|
||||
flashVendor: Scalars['String']['output'];
|
||||
guid: Scalars['String']['output'];
|
||||
ipsId?: Maybe<Scalars['String']['output']>;
|
||||
keyType?: Maybe<Scalars['String']['output']>;
|
||||
licenseKey: Scalars['String']['output'];
|
||||
name: Scalars['String']['output'];
|
||||
plgVersion?: Maybe<Scalars['String']['output']>;
|
||||
signedIn: Scalars['Boolean']['output'];
|
||||
};
|
||||
|
||||
export type LegacyService = {
|
||||
__typename?: 'LegacyService';
|
||||
name?: Maybe<Scalars['String']['output']>;
|
||||
online?: Maybe<Scalars['Boolean']['output']>;
|
||||
uptime?: Maybe<Scalars['Int']['output']>;
|
||||
version?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type Mutation = {
|
||||
__typename?: 'Mutation';
|
||||
remoteGraphQLResponse: Scalars['Boolean']['output'];
|
||||
remoteMutation: Scalars['String']['output'];
|
||||
remoteSession?: Maybe<Scalars['Boolean']['output']>;
|
||||
sendNotification?: Maybe<Notification>;
|
||||
sendPing?: Maybe<Scalars['Boolean']['output']>;
|
||||
updateDashboard: Dashboard;
|
||||
updateNetwork: Network;
|
||||
};
|
||||
|
||||
|
||||
export type MutationRemoteGraphQlResponseArgs = {
|
||||
input: RemoteGraphQlServerInput;
|
||||
};
|
||||
|
||||
|
||||
export type MutationRemoteMutationArgs = {
|
||||
input: RemoteGraphQlClientInput;
|
||||
};
|
||||
|
||||
|
||||
export type MutationRemoteSessionArgs = {
|
||||
remoteAccess: RemoteAccessInput;
|
||||
};
|
||||
|
||||
|
||||
export type MutationSendNotificationArgs = {
|
||||
notification: NotificationInput;
|
||||
};
|
||||
|
||||
|
||||
export type MutationUpdateDashboardArgs = {
|
||||
data: DashboardInput;
|
||||
};
|
||||
|
||||
|
||||
export type MutationUpdateNetworkArgs = {
|
||||
data: NetworkInput;
|
||||
};
|
||||
|
||||
export type Network = {
|
||||
__typename?: 'Network';
|
||||
accessUrls?: Maybe<Array<AccessUrl>>;
|
||||
};
|
||||
|
||||
export type NetworkInput = {
|
||||
accessUrls: Array<AccessUrlInput>;
|
||||
};
|
||||
|
||||
export type Notification = {
|
||||
__typename?: 'Notification';
|
||||
description?: Maybe<Scalars['String']['output']>;
|
||||
importance?: Maybe<Importance>;
|
||||
link?: Maybe<Scalars['String']['output']>;
|
||||
status: NotificationStatus;
|
||||
subject?: Maybe<Scalars['String']['output']>;
|
||||
title?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type NotificationInput = {
|
||||
description?: InputMaybe<Scalars['String']['input']>;
|
||||
importance: Importance;
|
||||
link?: InputMaybe<Scalars['String']['input']>;
|
||||
subject?: InputMaybe<Scalars['String']['input']>;
|
||||
title?: InputMaybe<Scalars['String']['input']>;
|
||||
};
|
||||
|
||||
export enum NotificationStatus {
|
||||
FAILED_TO_SEND = 'FAILED_TO_SEND',
|
||||
NOT_FOUND = 'NOT_FOUND',
|
||||
PENDING = 'PENDING',
|
||||
SENT = 'SENT'
|
||||
}
|
||||
|
||||
export type PingEvent = {
|
||||
__typename?: 'PingEvent';
|
||||
data?: Maybe<Scalars['String']['output']>;
|
||||
type: EventType;
|
||||
};
|
||||
|
||||
export type PingEventData = {
|
||||
__typename?: 'PingEventData';
|
||||
source: PingEventSource;
|
||||
};
|
||||
|
||||
export enum PingEventSource {
|
||||
API = 'API',
|
||||
MOTHERSHIP = 'MOTHERSHIP'
|
||||
}
|
||||
|
||||
export type ProfileModel = {
|
||||
__typename?: 'ProfileModel';
|
||||
avatar?: Maybe<Scalars['String']['output']>;
|
||||
cognito_id?: Maybe<Scalars['String']['output']>;
|
||||
url?: Maybe<Scalars['String']['output']>;
|
||||
userId?: Maybe<Scalars['ID']['output']>;
|
||||
username?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type Query = {
|
||||
__typename?: 'Query';
|
||||
apiVersion?: Maybe<Scalars['String']['output']>;
|
||||
dashboard?: Maybe<Dashboard>;
|
||||
ksServers: Array<KsServerDetails>;
|
||||
online?: Maybe<Scalars['Boolean']['output']>;
|
||||
remoteQuery: Scalars['String']['output'];
|
||||
serverStatus: ServerStatusResponse;
|
||||
servers: Array<Maybe<Server>>;
|
||||
status?: Maybe<ServerStatus>;
|
||||
};
|
||||
|
||||
|
||||
export type QueryDashboardArgs = {
|
||||
id: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
|
||||
export type QueryRemoteQueryArgs = {
|
||||
input: RemoteGraphQlClientInput;
|
||||
};
|
||||
|
||||
|
||||
export type QueryServerStatusArgs = {
|
||||
apiKey: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
export enum RegistrationState {
|
||||
/** Basic */
|
||||
BASIC = 'BASIC',
|
||||
/** BLACKLISTED */
|
||||
EBLACKLISTED = 'EBLACKLISTED',
|
||||
/** BLACKLISTED */
|
||||
EBLACKLISTED1 = 'EBLACKLISTED1',
|
||||
/** BLACKLISTED */
|
||||
EBLACKLISTED2 = 'EBLACKLISTED2',
|
||||
/** Trial Expired */
|
||||
EEXPIRED = 'EEXPIRED',
|
||||
/** GUID Error */
|
||||
EGUID = 'EGUID',
|
||||
/** Multiple License Keys Present */
|
||||
EGUID1 = 'EGUID1',
|
||||
/** Trial Requires Internet Connection */
|
||||
ENOCONN = 'ENOCONN',
|
||||
/** No Flash */
|
||||
ENOFLASH = 'ENOFLASH',
|
||||
ENOFLASH1 = 'ENOFLASH1',
|
||||
ENOFLASH2 = 'ENOFLASH2',
|
||||
ENOFLASH3 = 'ENOFLASH3',
|
||||
ENOFLASH4 = 'ENOFLASH4',
|
||||
ENOFLASH5 = 'ENOFLASH5',
|
||||
ENOFLASH6 = 'ENOFLASH6',
|
||||
ENOFLASH7 = 'ENOFLASH7',
|
||||
/** No Keyfile */
|
||||
ENOKEYFILE = 'ENOKEYFILE',
|
||||
/** No Keyfile */
|
||||
ENOKEYFILE1 = 'ENOKEYFILE1',
|
||||
/** Missing key file */
|
||||
ENOKEYFILE2 = 'ENOKEYFILE2',
|
||||
/** Invalid installation */
|
||||
ETRIAL = 'ETRIAL',
|
||||
/** Plus */
|
||||
PLUS = 'PLUS',
|
||||
/** Pro */
|
||||
PRO = 'PRO',
|
||||
/** Trial */
|
||||
TRIAL = 'TRIAL'
|
||||
}
|
||||
|
||||
export type RemoteAccessEvent = {
|
||||
__typename?: 'RemoteAccessEvent';
|
||||
data: RemoteAccessEventData;
|
||||
type: EventType;
|
||||
};
|
||||
|
||||
/** Defines whether remote access event is the initiation (from connect) or the response (from the server) */
|
||||
export enum RemoteAccessEventActionType {
|
||||
ACK = 'ACK',
|
||||
END = 'END',
|
||||
INIT = 'INIT',
|
||||
PING = 'PING'
|
||||
}
|
||||
|
||||
export type RemoteAccessEventData = {
|
||||
__typename?: 'RemoteAccessEventData';
|
||||
apiKey: Scalars['String']['output'];
|
||||
type: RemoteAccessEventActionType;
|
||||
url?: Maybe<AccessUrl>;
|
||||
};
|
||||
|
||||
export type RemoteAccessInput = {
|
||||
apiKey: Scalars['String']['input'];
|
||||
type: RemoteAccessEventActionType;
|
||||
url?: InputMaybe<AccessUrlInput>;
|
||||
};
|
||||
|
||||
export type RemoteGraphQlClientInput = {
|
||||
apiKey: Scalars['String']['input'];
|
||||
body: Scalars['String']['input'];
|
||||
/** Time in milliseconds to wait for a response from the remote server (defaults to 15000) */
|
||||
timeout?: InputMaybe<Scalars['Int']['input']>;
|
||||
/** How long mothership should cache the result of this query in seconds, only valid on queries */
|
||||
ttl?: InputMaybe<Scalars['Int']['input']>;
|
||||
};
|
||||
|
||||
export type RemoteGraphQlEvent = {
|
||||
__typename?: 'RemoteGraphQLEvent';
|
||||
data: RemoteGraphQlEventData;
|
||||
type: EventType;
|
||||
};
|
||||
|
||||
export type RemoteGraphQlEventData = {
|
||||
__typename?: 'RemoteGraphQLEventData';
|
||||
/** Contains mutation / subscription / query data in the form of body: JSON, variables: JSON */
|
||||
body: Scalars['String']['output'];
|
||||
/** sha256 hash of the body */
|
||||
sha256: Scalars['String']['output'];
|
||||
type: RemoteGraphQlEventType;
|
||||
};
|
||||
|
||||
export enum RemoteGraphQlEventType {
|
||||
REMOTE_MUTATION_EVENT = 'REMOTE_MUTATION_EVENT',
|
||||
REMOTE_QUERY_EVENT = 'REMOTE_QUERY_EVENT',
|
||||
REMOTE_SUBSCRIPTION_EVENT = 'REMOTE_SUBSCRIPTION_EVENT',
|
||||
REMOTE_SUBSCRIPTION_EVENT_PING = 'REMOTE_SUBSCRIPTION_EVENT_PING'
|
||||
}
|
||||
|
||||
export type RemoteGraphQlServerInput = {
|
||||
/** Body - contains an object containing data: (GQL response data) or errors: (GQL Errors) */
|
||||
body: Scalars['String']['input'];
|
||||
/** sha256 hash of the body */
|
||||
sha256: Scalars['String']['input'];
|
||||
type: RemoteGraphQlEventType;
|
||||
};
|
||||
|
||||
export type Server = {
|
||||
__typename?: 'Server';
|
||||
apikey?: Maybe<Scalars['String']['output']>;
|
||||
guid?: Maybe<Scalars['String']['output']>;
|
||||
lanip?: Maybe<Scalars['String']['output']>;
|
||||
localurl?: Maybe<Scalars['String']['output']>;
|
||||
name?: Maybe<Scalars['String']['output']>;
|
||||
owner?: Maybe<ProfileModel>;
|
||||
remoteurl?: Maybe<Scalars['String']['output']>;
|
||||
status?: Maybe<ServerStatus>;
|
||||
wanip?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
/** Defines server fields that have a TTL on them, for example last ping */
|
||||
export type ServerFieldsWithTtl = {
|
||||
__typename?: 'ServerFieldsWithTtl';
|
||||
lastPing?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type ServerModel = {
|
||||
apikey: Scalars['String']['output'];
|
||||
guid: Scalars['String']['output'];
|
||||
lanip: Scalars['String']['output'];
|
||||
localurl: Scalars['String']['output'];
|
||||
name: Scalars['String']['output'];
|
||||
remoteurl: Scalars['String']['output'];
|
||||
wanip: Scalars['String']['output'];
|
||||
};
|
||||
|
||||
export enum ServerStatus {
|
||||
NEVER_CONNECTED = 'never_connected',
|
||||
OFFLINE = 'offline',
|
||||
ONLINE = 'online'
|
||||
}
|
||||
|
||||
export type ServerStatusResponse = {
|
||||
__typename?: 'ServerStatusResponse';
|
||||
id: Scalars['ID']['output'];
|
||||
lastPublish?: Maybe<Scalars['String']['output']>;
|
||||
online: Scalars['Boolean']['output'];
|
||||
};
|
||||
|
||||
export type Service = {
|
||||
__typename?: 'Service';
|
||||
name?: Maybe<Scalars['String']['output']>;
|
||||
online?: Maybe<Scalars['Boolean']['output']>;
|
||||
uptime?: Maybe<Uptime>;
|
||||
version?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type Subscription = {
|
||||
__typename?: 'Subscription';
|
||||
events?: Maybe<Array<Event>>;
|
||||
remoteSubscription: Scalars['String']['output'];
|
||||
servers: Array<Server>;
|
||||
};
|
||||
|
||||
|
||||
export type SubscriptionRemoteSubscriptionArgs = {
|
||||
input: RemoteGraphQlClientInput;
|
||||
};
|
||||
|
||||
export type TwoFactorLocal = {
|
||||
__typename?: 'TwoFactorLocal';
|
||||
enabled?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export type TwoFactorRemote = {
|
||||
__typename?: 'TwoFactorRemote';
|
||||
enabled?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export type TwoFactorWithToken = {
|
||||
__typename?: 'TwoFactorWithToken';
|
||||
local?: Maybe<TwoFactorLocal>;
|
||||
remote?: Maybe<TwoFactorRemote>;
|
||||
token?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type TwoFactorWithoutToken = {
|
||||
__typename?: 'TwoFactorWithoutToken';
|
||||
local?: Maybe<TwoFactorLocal>;
|
||||
remote?: Maybe<TwoFactorRemote>;
|
||||
};
|
||||
|
||||
export enum UrlType {
|
||||
DEFAULT = 'DEFAULT',
|
||||
LAN = 'LAN',
|
||||
MDNS = 'MDNS',
|
||||
WAN = 'WAN',
|
||||
WIREGUARD = 'WIREGUARD'
|
||||
}
|
||||
|
||||
export type UpdateEvent = {
|
||||
__typename?: 'UpdateEvent';
|
||||
data: UpdateEventData;
|
||||
type: EventType;
|
||||
};
|
||||
|
||||
export type UpdateEventData = {
|
||||
__typename?: 'UpdateEventData';
|
||||
apiKey: Scalars['String']['output'];
|
||||
type: UpdateType;
|
||||
};
|
||||
|
||||
export enum UpdateType {
|
||||
DASHBOARD = 'DASHBOARD',
|
||||
NETWORK = 'NETWORK'
|
||||
}
|
||||
|
||||
export type Uptime = {
|
||||
__typename?: 'Uptime';
|
||||
timestamp?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type UserProfileModelWithServers = {
|
||||
__typename?: 'UserProfileModelWithServers';
|
||||
profile: ProfileModel;
|
||||
servers: Array<Server>;
|
||||
};
|
||||
|
||||
export type Vars = {
|
||||
__typename?: 'Vars';
|
||||
expireTime?: Maybe<Scalars['DateTime']['output']>;
|
||||
flashGuid?: Maybe<Scalars['String']['output']>;
|
||||
regState?: Maybe<RegistrationState>;
|
||||
regTm2?: Maybe<Scalars['String']['output']>;
|
||||
regTy?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type SendRemoteGraphQlResponseMutationVariables = Exact<{
|
||||
input: RemoteGraphQlServerInput;
|
||||
}>;
|
||||
|
||||
|
||||
export type SendRemoteGraphQlResponseMutation = { __typename?: 'Mutation', remoteGraphQLResponse: boolean };
|
||||
|
||||
export type RemoteGraphQlEventFragmentFragment = { __typename?: 'RemoteGraphQLEvent', remoteGraphQLEventData: { __typename?: 'RemoteGraphQLEventData', type: RemoteGraphQlEventType, body: string, sha256: string } } & { ' $fragmentName'?: 'RemoteGraphQlEventFragmentFragment' };
|
||||
|
||||
export type EventsSubscriptionVariables = Exact<{ [key: string]: never; }>;
|
||||
|
||||
|
||||
export type EventsSubscription = { __typename?: 'Subscription', events?: Array<{ __typename: 'ClientConnectedEvent', connectedEvent: EventType, connectedData: { __typename?: 'ClientConnectionEventData', type: ClientType, version: string, apiKey: string } } | { __typename: 'ClientDisconnectedEvent', disconnectedEvent: EventType, disconnectedData: { __typename?: 'ClientConnectionEventData', type: ClientType, version: string, apiKey: string } } | { __typename: 'ClientPingEvent' } | { __typename: 'RemoteAccessEvent' } | (
|
||||
{ __typename: 'RemoteGraphQLEvent' }
|
||||
& { ' $fragmentRefs'?: { 'RemoteGraphQlEventFragmentFragment': RemoteGraphQlEventFragmentFragment } }
|
||||
) | { __typename: 'UpdateEvent' }> | null };
|
||||
|
||||
export const RemoteGraphQlEventFragmentFragmentDoc = {"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RemoteGraphQLEventFragment"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RemoteGraphQLEvent"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","alias":{"kind":"Name","value":"remoteGraphQLEventData"},"name":{"kind":"Name","value":"data"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"body"}},{"kind":"Field","name":{"kind":"Name","value":"sha256"}}]}}]}}]} as unknown as DocumentNode<RemoteGraphQlEventFragmentFragment, unknown>;
|
||||
export const SendRemoteGraphQlResponseDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"sendRemoteGraphQLResponse"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"input"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"RemoteGraphQLServerInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"remoteGraphQLResponse"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"input"},"value":{"kind":"Variable","name":{"kind":"Name","value":"input"}}}]}]}}]} as unknown as DocumentNode<SendRemoteGraphQlResponseMutation, SendRemoteGraphQlResponseMutationVariables>;
|
||||
export const EventsDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"subscription","name":{"kind":"Name","value":"events"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"events"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"ClientConnectedEvent"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","alias":{"kind":"Name","value":"connectedData"},"name":{"kind":"Name","value":"data"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"version"}},{"kind":"Field","name":{"kind":"Name","value":"apiKey"}}]}},{"kind":"Field","alias":{"kind":"Name","value":"connectedEvent"},"name":{"kind":"Name","value":"type"}}]}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"ClientDisconnectedEvent"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","alias":{"kind":"Name","value":"disconnectedData"},"name":{"kind":"Name","value":"data"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"version"}},{"kind":"Field","name":{"kind":"Name","value":"apiKey"}}]}},{"kind":"Field","alias":{"kind":"Name","value":"disconnectedEvent"},"name":{"kind":"Name","value":"type"}}]}},{"kind":"FragmentSpread","name":{"kind":"Name","value":"RemoteGraphQLEventFragment"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RemoteGraphQLEventFragment"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RemoteGraphQLEvent"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","alias":{"kind":"Name","value":"remoteGraphQLEventData"},"name":{"kind":"Name","value":"data"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"body"}},{"kind":"Field","name":{"kind":"Name","value":"sha256"}}]}}]}}]} as unknown as DocumentNode<EventsSubscription, EventsSubscriptionVariables>;
|
||||
@@ -1,2 +0,0 @@
export * from "./fragment-masking.js";
export * from "./gql.js";
@@ -1,216 +0,0 @@
|
||||
/* eslint-disable */
|
||||
import { z } from 'zod'
|
||||
import { AccessUrlInput, ArrayCapacityBytesInput, ArrayCapacityInput, ClientType, ConfigErrorState, DashboardAppsInput, DashboardArrayInput, DashboardCaseInput, DashboardConfigInput, DashboardDisplayInput, DashboardInput, DashboardOsInput, DashboardServiceInput, DashboardServiceUptimeInput, DashboardTwoFactorInput, DashboardTwoFactorLocalInput, DashboardTwoFactorRemoteInput, DashboardVarsInput, DashboardVersionsInput, DashboardVmsInput, EventType, Importance, NetworkInput, NotificationInput, NotificationStatus, PingEventSource, RegistrationState, RemoteAccessEventActionType, RemoteAccessInput, RemoteGraphQlClientInput, RemoteGraphQlEventType, RemoteGraphQlServerInput, ServerStatus, UrlType, UpdateType } from '@app/graphql/generated/client/graphql.js'
|
||||
|
||||
type Properties<T> = Required<{
|
||||
[K in keyof T]: z.ZodType<T[K], any, T[K]>;
|
||||
}>;
|
||||
|
||||
type definedNonNullAny = {};
|
||||
|
||||
export const isDefinedNonNullAny = (v: any): v is definedNonNullAny => v !== undefined && v !== null;
|
||||
|
||||
export const definedNonNullAnySchema = z.any().refine((v) => isDefinedNonNullAny(v));
|
||||
|
||||
export const ClientTypeSchema = z.nativeEnum(ClientType);
|
||||
|
||||
export const ConfigErrorStateSchema = z.nativeEnum(ConfigErrorState);

export const EventTypeSchema = z.nativeEnum(EventType);

export const ImportanceSchema = z.nativeEnum(Importance);

export const NotificationStatusSchema = z.nativeEnum(NotificationStatus);

export const PingEventSourceSchema = z.nativeEnum(PingEventSource);

export const RegistrationStateSchema = z.nativeEnum(RegistrationState);

export const RemoteAccessEventActionTypeSchema = z.nativeEnum(RemoteAccessEventActionType);

export const RemoteGraphQlEventTypeSchema = z.nativeEnum(RemoteGraphQlEventType);

export const ServerStatusSchema = z.nativeEnum(ServerStatus);

export const UrlTypeSchema = z.nativeEnum(UrlType);

export const UpdateTypeSchema = z.nativeEnum(UpdateType);

export function AccessUrlInputSchema(): z.ZodObject<Properties<AccessUrlInput>> {
    return z.object({
        ipv4: z.instanceof(URL).nullish(),
        ipv6: z.instanceof(URL).nullish(),
        name: z.string().nullish(),
        type: UrlTypeSchema
    })
}

export function ArrayCapacityBytesInputSchema(): z.ZodObject<Properties<ArrayCapacityBytesInput>> {
    return z.object({
        free: z.number().nullish(),
        total: z.number().nullish(),
        used: z.number().nullish()
    })
}

export function ArrayCapacityInputSchema(): z.ZodObject<Properties<ArrayCapacityInput>> {
    return z.object({
        bytes: z.lazy(() => ArrayCapacityBytesInputSchema().nullish())
    })
}

export function DashboardAppsInputSchema(): z.ZodObject<Properties<DashboardAppsInput>> {
    return z.object({
        installed: z.number(),
        started: z.number()
    })
}

export function DashboardArrayInputSchema(): z.ZodObject<Properties<DashboardArrayInput>> {
    return z.object({
        capacity: z.lazy(() => ArrayCapacityInputSchema()),
        state: z.string()
    })
}

export function DashboardCaseInputSchema(): z.ZodObject<Properties<DashboardCaseInput>> {
    return z.object({
        base64: z.string(),
        error: z.string().nullish(),
        icon: z.string(),
        url: z.string()
    })
}

export function DashboardConfigInputSchema(): z.ZodObject<Properties<DashboardConfigInput>> {
    return z.object({
        error: z.string().nullish(),
        valid: z.boolean()
    })
}

export function DashboardDisplayInputSchema(): z.ZodObject<Properties<DashboardDisplayInput>> {
    return z.object({
        case: z.lazy(() => DashboardCaseInputSchema())
    })
}

export function DashboardInputSchema(): z.ZodObject<Properties<DashboardInput>> {
    return z.object({
        apps: z.lazy(() => DashboardAppsInputSchema()),
        array: z.lazy(() => DashboardArrayInputSchema()),
        config: z.lazy(() => DashboardConfigInputSchema()),
        display: z.lazy(() => DashboardDisplayInputSchema()),
        os: z.lazy(() => DashboardOsInputSchema()),
        services: z.array(z.lazy(() => DashboardServiceInputSchema())),
        twoFactor: z.lazy(() => DashboardTwoFactorInputSchema().nullish()),
        vars: z.lazy(() => DashboardVarsInputSchema()),
        versions: z.lazy(() => DashboardVersionsInputSchema()),
        vms: z.lazy(() => DashboardVmsInputSchema())
    })
}

export function DashboardOsInputSchema(): z.ZodObject<Properties<DashboardOsInput>> {
    return z.object({
        hostname: z.string(),
        uptime: z.string()
    })
}

export function DashboardServiceInputSchema(): z.ZodObject<Properties<DashboardServiceInput>> {
    return z.object({
        name: z.string(),
        online: z.boolean(),
        uptime: z.lazy(() => DashboardServiceUptimeInputSchema().nullish()),
        version: z.string()
    })
}

export function DashboardServiceUptimeInputSchema(): z.ZodObject<Properties<DashboardServiceUptimeInput>> {
    return z.object({
        timestamp: z.string()
    })
}

export function DashboardTwoFactorInputSchema(): z.ZodObject<Properties<DashboardTwoFactorInput>> {
    return z.object({
        local: z.lazy(() => DashboardTwoFactorLocalInputSchema()),
        remote: z.lazy(() => DashboardTwoFactorRemoteInputSchema())
    })
}

export function DashboardTwoFactorLocalInputSchema(): z.ZodObject<Properties<DashboardTwoFactorLocalInput>> {
    return z.object({
        enabled: z.boolean()
    })
}

export function DashboardTwoFactorRemoteInputSchema(): z.ZodObject<Properties<DashboardTwoFactorRemoteInput>> {
    return z.object({
        enabled: z.boolean()
    })
}

export function DashboardVarsInputSchema(): z.ZodObject<Properties<DashboardVarsInput>> {
    return z.object({
        flashGuid: z.string(),
        regState: z.string(),
        regTy: z.string(),
        serverDescription: z.string().nullish(),
        serverName: z.string().nullish()
    })
}

export function DashboardVersionsInputSchema(): z.ZodObject<Properties<DashboardVersionsInput>> {
    return z.object({
        unraid: z.string()
    })
}

export function DashboardVmsInputSchema(): z.ZodObject<Properties<DashboardVmsInput>> {
    return z.object({
        installed: z.number(),
        started: z.number()
    })
}

export function NetworkInputSchema(): z.ZodObject<Properties<NetworkInput>> {
    return z.object({
        accessUrls: z.array(z.lazy(() => AccessUrlInputSchema()))
    })
}

export function NotificationInputSchema(): z.ZodObject<Properties<NotificationInput>> {
    return z.object({
        description: z.string().nullish(),
        importance: ImportanceSchema,
        link: z.string().nullish(),
        subject: z.string().nullish(),
        title: z.string().nullish()
    })
}

export function RemoteAccessInputSchema(): z.ZodObject<Properties<RemoteAccessInput>> {
    return z.object({
        apiKey: z.string(),
        type: RemoteAccessEventActionTypeSchema,
        url: z.lazy(() => AccessUrlInputSchema().nullish())
    })
}

export function RemoteGraphQlClientInputSchema(): z.ZodObject<Properties<RemoteGraphQlClientInput>> {
    return z.object({
        apiKey: z.string(),
        body: z.string(),
        timeout: z.number().nullish(),
        ttl: z.number().nullish()
    })
}

export function RemoteGraphQlServerInputSchema(): z.ZodObject<Properties<RemoteGraphQlServerInput>> {
    return z.object({
        body: z.string(),
        sha256: z.string(),
        type: RemoteGraphQlEventTypeSchema
    })
}
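A quick usage sketch for the generated validators above: each *InputSchema factory returns a zod object suitable for parsing untrusted input before it reaches a resolver. The import path, the enum member, and the sample payload below are illustrative assumptions, not taken from this diff.

// Hypothetical consumer of the generated zod validators (module path and payload assumed).
import { ImportanceSchema, NotificationInputSchema } from '@app/graphql/generated/validators.js';

const result = NotificationInputSchema().safeParse({
    title: 'Array started',
    importance: ImportanceSchema.enum.INFO, // assumes the Importance enum exposes an INFO member
});

if (!result.success) {
    console.error(result.error.flatten());
}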
@@ -1,10 +0,0 @@
import { FatalAppError } from '@app/core/errors/fatal-error.js';
import { modules } from '@app/core/index.js';

export const getCoreModule = (moduleName: string) => {
    if (!Object.keys(modules).includes(moduleName)) {
        throw new FatalAppError(`"${moduleName}" is not a valid core module.`);
    }

    return modules[moduleName];
};
@@ -1,7 +0,0 @@
import { graphql } from '@app/graphql/generated/client/gql.js';

export const SEND_REMOTE_QUERY_RESPONSE = graphql(/* GraphQL */ `
    mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {
        remoteGraphQLResponse(input: $input)
    }
`);
@@ -1,36 +0,0 @@
import { graphql } from '@app/graphql/generated/client/gql.js';

export const RemoteGraphQL_Fragment = graphql(/* GraphQL */ `
    fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {
        remoteGraphQLEventData: data {
            type
            body
            sha256
        }
    }
`);

export const EVENTS_SUBSCRIPTION = graphql(/* GraphQL */ `
    subscription events {
        events {
            __typename
            ... on ClientConnectedEvent {
                connectedData: data {
                    type
                    version
                    apiKey
                }
                connectedEvent: type
            }
            ... on ClientDisconnectedEvent {
                disconnectedData: data {
                    type
                    version
                    apiKey
                }
                disconnectedEvent: type
            }
            ...RemoteGraphQLEventFragment
        }
    }
`);
@@ -1,234 +0,0 @@
import { AccessUrl, URL_TYPE } from '@unraid/shared/network.model.js';

import type { RootState } from '@app/store/index.js';
import { logger } from '@app/core/log.js';
import { type Nginx } from '@app/core/types/states/nginx.js';
import { store } from '@app/store/index.js';

interface UrlForFieldInput {
    url: string;
    port?: number;
    portSsl?: number;
}

interface UrlForFieldInputSecure extends UrlForFieldInput {
    url: string;
    portSsl: number;
}
interface UrlForFieldInputInsecure extends UrlForFieldInput {
    url: string;
    port: number;
}

export const getUrlForField = ({
    url,
    port,
    portSsl,
}: UrlForFieldInputInsecure | UrlForFieldInputSecure) => {
    let portToUse = '';
    let httpMode = 'https://';

    if (!url || url === '') {
        throw new Error('No URL Provided');
    }

    if (port) {
        portToUse = port === 80 ? '' : `:${port}`;
        httpMode = 'http://';
    } else if (portSsl) {
        portToUse = portSsl === 443 ? '' : `:${portSsl}`;
        httpMode = 'https://';
    } else {
        throw new Error(`No ports specified for URL: ${url}`);
    }

    const urlString = `${httpMode}${url}${portToUse}`;

    try {
        return new URL(urlString);
    } catch (error: unknown) {
        throw new Error(`Failed to parse URL: ${urlString}`);
    }
};

const fieldIsFqdn = (field: keyof Nginx) => field?.toLowerCase().includes('fqdn');

export type NginxUrlFields = Extract<
    keyof Nginx,
    'lanIp' | 'lanIp6' | 'lanName' | 'lanMdns' | 'lanFqdn' | 'wanFqdn' | 'wanFqdn6'
>;

/**
 *
 * @param nginx Nginx Config File
 * @param field The field to build the URL from
 * @returns a URL, created from the combination of inputs
 * @throws Error when the URL cannot be created or the URL is invalid
 */
export const getUrlForServer = ({ nginx, field }: { nginx: Nginx; field: NginxUrlFields }): URL => {
    if (nginx[field]) {
        if (fieldIsFqdn(field)) {
            return getUrlForField({
                url: nginx[field],
                portSsl: nginx.httpsPort,
            });
        }

        if (!nginx.sslEnabled) {
            // Use SSL = no
            return getUrlForField({ url: nginx[field], port: nginx.httpPort });
        }

        if (nginx.sslMode === 'yes') {
            return getUrlForField({
                url: nginx[field],
                portSsl: nginx.httpsPort,
            });
        }

        if (nginx.sslMode === 'auto') {
            throw new Error(`Cannot get IP Based URL for field: "${field}" SSL mode auto`);
        }
    }

    throw new Error(
        `IP URL Resolver: Could not resolve any access URL for field: "${field}", is FQDN?: ${fieldIsFqdn(
            field
        )}`
    );
};

const getUrlTypeFromFqdn = (fqdnType: string): URL_TYPE => {
    switch (fqdnType) {
        case 'LAN':
            return URL_TYPE.LAN;
        case 'WAN':
            return URL_TYPE.WAN;
        case 'WG':
            return URL_TYPE.WIREGUARD;
        default:
            // HACK: This should be added as a new type (e.g. OTHER or CUSTOM)
            return URL_TYPE.WIREGUARD;
    }
};

export const getServerIps = (
    state: RootState = store.getState()
): { urls: AccessUrl[]; errors: Error[] } => {
    const { nginx } = state.emhttp;
    const {
        remote: { wanport },
    } = state.config;
    if (!nginx || Object.keys(nginx).length === 0) {
        return { urls: [], errors: [new Error('Nginx Not Loaded')] };
    }

    const errors: Error[] = [];
    const urls: AccessUrl[] = [];

    try {
        // Default URL
        const defaultUrl = new URL(nginx.defaultUrl);
        urls.push({
            name: 'Default',
            type: URL_TYPE.DEFAULT,
            ipv4: defaultUrl,
            ipv6: defaultUrl,
        });
    } catch (error: unknown) {
        if (error instanceof Error) {
            errors.push(error);
        } else {
            logger.warn('Uncaught error in network resolver', error);
        }
    }

    try {
        // Lan IP URL
        const lanIp4Url = getUrlForServer({ nginx, field: 'lanIp' });
        urls.push({
            name: 'LAN IPv4',
            type: URL_TYPE.LAN,
            ipv4: lanIp4Url,
        });
    } catch (error: unknown) {
        if (error instanceof Error) {
            errors.push(error);
        } else {
            logger.warn('Uncaught error in network resolver', error);
        }
    }

    try {
        // Lan IP6 URL
        const lanIp6Url = getUrlForServer({ nginx, field: 'lanIp6' });
        urls.push({
            name: 'LAN IPv6',
            type: URL_TYPE.LAN,
            ipv4: lanIp6Url,
        });
    } catch (error: unknown) {
        if (error instanceof Error) {
            errors.push(error);
        } else {
            logger.warn('Uncaught error in network resolver', error);
        }
    }

    try {
        // Lan Name URL
        const lanNameUrl = getUrlForServer({ nginx, field: 'lanName' });
        urls.push({
            name: 'LAN Name',
            type: URL_TYPE.MDNS,
            ipv4: lanNameUrl,
        });
    } catch (error: unknown) {
        if (error instanceof Error) {
            errors.push(error);
        } else {
            logger.warn('Uncaught error in network resolver', error);
        }
    }

    try {
        // Lan MDNS URL
        const lanMdnsUrl = getUrlForServer({ nginx, field: 'lanMdns' });
        urls.push({
            name: 'LAN MDNS',
            type: URL_TYPE.MDNS,
            ipv4: lanMdnsUrl,
        });
    } catch (error: unknown) {
        if (error instanceof Error) {
            errors.push(error);
        } else {
            logger.warn('Uncaught error in network resolver', error);
        }
    }

    // Now Process the FQDN Urls
    nginx.fqdnUrls.forEach((fqdnUrl) => {
        try {
            const urlType = getUrlTypeFromFqdn(fqdnUrl.interface);
            const fqdnUrlToUse = getUrlForField({
                url: fqdnUrl.fqdn,
                portSsl: urlType === URL_TYPE.WAN ? Number(wanport) : nginx.httpsPort,
            });

            urls.push({
                name: `FQDN ${fqdnUrl.interface}${fqdnUrl.id !== null ? ` ${fqdnUrl.id}` : ''}`,
                type: getUrlTypeFromFqdn(fqdnUrl.interface),
                ipv4: fqdnUrlToUse,
            });
        } catch (error: unknown) {
            if (error instanceof Error) {
                errors.push(error);
            } else {
                logger.warn('Uncaught error in network resolver', error);
            }
        }
    });

    return { urls, errors };
};
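For orientation, a small sketch of how the removed URL helpers above might be consumed; the import path, hostname, and call sites are hypothetical.

// Hypothetical call sites for the deleted helpers (path and values assumed).
import { getServerIps, getUrlForField } from '@app/graphql/resolvers/subscription/network.js';

// Builds an https URL for an FQDN, dropping the port when it is 443.
const wanUrl = getUrlForField({ url: 'example.unraid.net', portSsl: 443 }); // -> https://example.unraid.net/

// Collects every resolvable access URL plus the per-field errors gathered along the way.
const { urls, errors } = getServerIps();
errors.forEach((error) => console.warn(error.message));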
@@ -1,28 +0,0 @@
import { mergeTypeDefs } from '@graphql-tools/merge';

import { logger } from '@app/core/log.js';

export const loadTypeDefs = async (additionalTypeDefs: string[] = []) => {
    // TypeScript now knows this returns Record<string, () => Promise<string>>
    const typeModules = import.meta.glob('./types/**/*.graphql', { query: '?raw', import: 'default' });

    try {
        const files = await Promise.all(
            Object.values(typeModules).map(async (importFn) => {
                const content = await importFn();
                if (typeof content !== 'string') {
                    throw new Error('Invalid GraphQL type definition format');
                }
                return content;
            })
        );
        if (!files.length) {
            throw new Error('No GraphQL type definitions found');
        }
        files.push(...additionalTypeDefs);
        return mergeTypeDefs(files);
    } catch (error) {
        logger.error('Failed to load GraphQL type definitions:', error);
        throw error;
    }
};
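A usage sketch for the removed loadTypeDefs helper above: merge the bundled .graphql files with extra SDL supplied by a caller, then hand the result to a schema builder. The import path, the extra SDL, and the makeExecutableSchema step are assumptions for illustration.

// Hypothetical caller of the deleted loadTypeDefs (path and extra SDL assumed).
import { makeExecutableSchema } from '@graphql-tools/schema';

import { loadTypeDefs } from '@app/graphql/schema/load-type-defs.js';

const typeDefs = await loadTypeDefs([
    /* GraphQL */ `
        extend type Query {
            pluginStatus: String
        }
    `,
]);

const schema = makeExecutableSchema({ typeDefs });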
@@ -1,111 +0,0 @@
import { AppError } from '@app/core/errors/app-error.js';
import { graphqlLogger } from '@app/core/log.js';
import { pubsub } from '@app/core/pubsub.js';
import { type User } from '@app/core/types/states/user.js';
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission.js';
import { store } from '@app/store/index.js';
import { MinigraphStatus } from '@app/unraid-api/graph/resolvers/cloud/cloud.model.js';
import { Server, ServerStatus } from '@app/unraid-api/graph/resolvers/servers/server.model.js';

export interface Context {
    user?: User;
    websocketId: string;
}

type Subscription = {
    total: number;
    channels: string[];
};

const subscriptions: Record<string, Subscription> = {};

/**
 * Return current ws connection count.
 */
export const getWsConnectionCount = () =>
    Object.values(subscriptions).filter((subscription) => subscription.total >= 1).length;

/**
 * Return current ws connection count in channel.
 */
export const getWsConnectionCountInChannel = (channel: string) =>
    Object.values(subscriptions).filter((subscription) => subscription.channels.includes(channel))
        .length;

export const hasSubscribedToChannel = (id: string, channel: string) => {
    graphqlLogger.debug('Subscribing to %s', channel);

    // Setup initial object
    if (subscriptions[id] === undefined) {
        subscriptions[id] = {
            total: 1,
            channels: [channel],
        };
        return;
    }

    subscriptions[id].total++;
    subscriptions[id].channels.push(channel);
};

/**
 * Create a pubsub subscription.
 * @param channel The pubsub channel to subscribe to.
 * @param resource The access-control permission resource to check against.
 */
export const createSubscription = (channel: string, resource?: string) => ({
    subscribe(_: unknown, __: unknown, context: Context) {
        if (!context.user) {
            throw new AppError('<ws> No user found in context.', 500);
        }

        // Check the user has permission to subscribe to this endpoint
        ensurePermission(context.user, {
            resource: resource ?? channel,
            action: 'read',
            possession: 'any',
        });

        hasSubscribedToChannel(context.websocketId, channel);
        return pubsub.asyncIterableIterator(channel);
    },
});

export const getLocalServer = (getState = store.getState): Array<Server> => {
    const { emhttp, config, minigraph } = getState();
    const guid = emhttp.var.regGuid;
    const { name } = emhttp.var;
    const wanip = '';
    const lanip: string = emhttp.networks[0].ipaddr[0];
    const port = emhttp.var?.port;
    const localurl = `http://${lanip}:${port}`;
    const remoteurl = '';

    return [
        {
            id: 'local',
            owner: {
                id: 'local',
                username: config.remote.username ?? 'root',
                url: '',
                avatar: '',
            },
            guid,
            apikey: config.remote.apikey ?? '',
            name: name ?? 'Local Server',
            status:
                minigraph.status === MinigraphStatus.CONNECTED
                    ? ServerStatus.ONLINE
                    : ServerStatus.OFFLINE,
            wanip,
            lanip,
            localurl,
            remoteurl,
        },
    ];
};

export const getServers = (getState = store.getState): Server[] => {
    // Check if we have the servers already cached, if so return them
    return getLocalServer(getState) ?? [];
};
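A sketch of how the createSubscription helper above was typically wired into a resolver map; the channel names, resource name, and module path are hypothetical.

// Hypothetical Subscription resolver map built from the deleted helper (path assumed).
import { createSubscription } from '@app/graphql/schema/utils.js';

export const Subscription = {
    // Subscribes to the 'servers' pubsub channel; the permission resource defaults to the channel name.
    servers: createSubscription('servers'),
    // Publishes on 'notifications' but checks the 'notification' access-control resource instead.
    notificationAdded: createSubscription('notifications', 'notification'),
};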
@@ -22,10 +22,8 @@ import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-fi
import { shutdownApiEvent } from '@app/store/actions/shutdown-api-event.js';
import { store } from '@app/store/index.js';
import { startMiddlewareListeners } from '@app/store/listeners/listener-middleware.js';
import { loadConfigFile } from '@app/store/modules/config.js';
import { loadStateFiles } from '@app/store/modules/emhttp.js';
import { loadRegistrationKey } from '@app/store/modules/registration.js';
import { startStoreSync } from '@app/store/store-sync.js';
import { setupDynamixConfigWatch } from '@app/store/watch/dynamix-config-watch.js';
import { setupRegistrationKeyWatch } from '@app/store/watch/registration-watch.js';
import { StateManager } from '@app/store/watch/state-watch.js';
@@ -71,13 +69,6 @@ export const viteNodeApp = async () => {
    cacheable.install(http.globalAgent);
    cacheable.install(https.globalAgent);

    // Start file <-> store sync
    // Must occur before config is loaded to ensure that the handler can fix broken configs
    await startStoreSync();

    // Load my servers config file into store
    await store.dispatch(loadConfigFile());

    // Load emhttp state into store
    await store.dispatch(loadStateFiles());
@@ -108,7 +99,7 @@ export const viteNodeApp = async () => {

    asyncExitHook(
        async (signal) => {
            logger.info('Exiting with signal %s', signal);
            logger.info('Exiting with signal %d', signal);
            await server?.close?.();
            // If port is unix socket, delete socket before exiting
            unlinkUnixPort();
@@ -1,12 +0,0 @@
/**
 * Check if the API Key is the correct length (64 characters)
 * @param apiKey API Key to validate length
 * @returns Boolean
 */
export const isApiKeyCorrectLength = (apiKey: string) => {
    if (apiKey.length !== 64) {
        return false;
    }

    return true;
};
@@ -94,7 +94,7 @@ export const run = async (channel: string, mutation: string, options: RunOptions
                );
            }
        } else {
            logger.debug('Error: %s', error);
            logger.debug('Error: %o', error as object);
        }
    }
};
@@ -1,19 +0,0 @@
import { createAsyncThunk } from '@reduxjs/toolkit';

import { remoteAccessLogger } from '@app/core/log.js';
import { NginxManager } from '@app/core/modules/services/nginx.js';
import { UpdateDNSManager } from '@app/core/modules/services/update-dns.js';
import { type AppDispatch, type RootState } from '@app/store/index.js';

export const reloadNginxAndUpdateDNS = createAsyncThunk<
    void,
    void,
    { state: RootState; dispatch: AppDispatch }
>('config/reloadNginxAndUpdateDNS', async () => {
    remoteAccessLogger.debug('Reloading Nginx and Updating DNS');
    const manager = new NginxManager();
    const updateDns = new UpdateDNSManager();
    await manager.reloadNginx();
    await updateDns.updateDNS();
    remoteAccessLogger.debug('Finished Reloading Nginx and Updating DNS');
});
@@ -1,8 +0,0 @@
import { createAction } from '@reduxjs/toolkit';

import { MinigraphStatus } from '@app/unraid-api/graph/resolvers/cloud/cloud.model.js';

export const setGraphqlConnectionStatus = createAction<{
    status: MinigraphStatus;
    error: string | null;
}>('graphql/status');
@@ -1,16 +0,0 @@
import { createAsyncThunk } from '@reduxjs/toolkit';

import { reloadNginxAndUpdateDNS } from '@app/store/actions/reload-nginx-and-update-dns.js';
import { type AppDispatch, type RootState } from '@app/store/index.js';
import { setWanAccess } from '@app/store/modules/config.js';

type EnableWanAccessArgs = Parameters<typeof setWanAccess>[0];
export const setWanAccessAndReloadNginx = createAsyncThunk<
    void,
    EnableWanAccessArgs,
    { state: RootState; dispatch: AppDispatch }
>('config/setWanAccessAndReloadNginx', async (payload, { dispatch }) => {
    dispatch(setWanAccess(payload));

    await dispatch(reloadNginxAndUpdateDNS());
});
@@ -1,13 +1,9 @@
import { logDestination, logger } from '@app/core/log.js';
import { stopListeners } from '@app/store/listeners/stop-listeners.js';
import { writeConfigSync } from '@app/store/sync/config-disk-sync.js';

export const shutdownApiEvent = () => {
    logger.debug('Running shutdown');
    stopListeners();
    logger.debug('Writing final configs');
    writeConfigSync('flash');
    writeConfigSync('memory');
    logger.debug('Shutting down log destination');
    logDestination.flushSync();
    logDestination.destroy();
@@ -16,11 +16,8 @@ export type AppDispatch = typeof store.dispatch;
export type ApiStore = typeof store;

export const getters = {
    config: () => store.getState().config,
    dynamix: () => store.getState().dynamix,
    emhttp: () => store.getState().emhttp,
    minigraph: () => store.getState().minigraph,
    paths: () => store.getState().paths,
    registration: () => store.getState().registration,
    upnp: () => store.getState().upnp,
};
@@ -1,22 +0,0 @@
import { writeFileSync } from 'fs';

import type { ConfigType } from '@app/core/utils/files/config-file-normalizer.js';
import { logger } from '@app/core/log.js';
import { getWriteableConfig } from '@app/core/utils/files/config-file-normalizer.js';
import { safelySerializeObjectToIni } from '@app/core/utils/files/safe-ini-serializer.js';
import { startAppListening } from '@app/store/listeners/listener-middleware.js';
import { configUpdateActionsFlash, configUpdateActionsMemory } from '@app/store/modules/config.js';

export const enableConfigFileListener = (mode: ConfigType) => () =>
    startAppListening({
        matcher: mode === 'flash' ? configUpdateActionsFlash : configUpdateActionsMemory,
        async effect(_, { getState }) {
            const { paths, config } = getState();
            const pathToWrite =
                mode === 'flash' ? paths['myservers-config'] : paths['myservers-config-states'];
            const writeableConfig = getWriteableConfig(config, mode);
            const serializedConfig = safelySerializeObjectToIni(writeableConfig);
            logger.debug('Writing updated config to %s', pathToWrite);
            writeFileSync(pathToWrite, serializedConfig);
        },
    });
@@ -5,9 +5,6 @@ import { addListener, createListenerMiddleware } from '@reduxjs/toolkit';

import { type AppDispatch, type RootState } from '@app/store/index.js';
import { enableArrayEventListener } from '@app/store/listeners/array-event-listener.js';
import { enableConfigFileListener } from '@app/store/listeners/config-listener.js';
import { enableUpnpListener } from '@app/store/listeners/upnp-listener.js';
import { enableVersionListener } from '@app/store/listeners/version-listener.js';

export const listenerMiddleware = createListenerMiddleware();
@@ -21,9 +18,5 @@ export const addAppListener = addListener as TypedAddListener<RootState, AppDisp

export const startMiddlewareListeners = () => {
    // Begin listening for events
    enableConfigFileListener('flash')();
    enableConfigFileListener('memory')();
    enableUpnpListener();
    enableVersionListener();
    enableArrayEventListener();
};
@@ -1,68 +0,0 @@
import { isAnyOf } from '@reduxjs/toolkit';

import { upnpLogger } from '@app/core/log.js';
import { type RootState } from '@app/store/index.js';
import { startAppListening } from '@app/store/listeners/listener-middleware.js';
import { loadConfigFile } from '@app/store/modules/config.js';
import { loadSingleStateFile, loadStateFiles } from '@app/store/modules/emhttp.js';
import { disableUpnp, enableUpnp } from '@app/store/modules/upnp.js';
import { FileLoadStatus } from '@app/store/types.js';

// FLAG for review: make sure we replace this
const shouldUpnpBeEnabled = (state: RootState | null): boolean => {
    if (
        state?.config.status !== FileLoadStatus.LOADED ||
        state?.emhttp.status !== FileLoadStatus.LOADED
    ) {
        return false;
    }

    const { useUpnp } = state.emhttp.var;
    const { upnpEnabled, wanaccess } = state.config.remote;

    return useUpnp && upnpEnabled === 'yes' && wanaccess === 'yes';
};

const isStateOrConfigUpdate = isAnyOf(
    loadConfigFile.fulfilled,
    loadSingleStateFile.fulfilled,
    loadStateFiles.fulfilled
    // setupRemoteAccessThunk.fulfilled
);

export const enableUpnpListener = () =>
    startAppListening({
        predicate(action, currentState, previousState) {
            // @TODO: One of our actions is incorrectly configured. Sometimes the action is an anonymous function. We need to fix this.
            if (
                (isStateOrConfigUpdate(action) || !action?.type) &&
                shouldUpnpBeEnabled(currentState) !== shouldUpnpBeEnabled(previousState)
            ) {
                return true;
            }

            return false;
        },
        async effect(_, { getState, dispatch }) {
            const state = getState();
            const {
                config: {
                    remote: { wanport },
                },
                emhttp: {
                    var: { portssl },
                },
            } = getState();
            upnpLogger.info(
                'UPNP Enabled: (%s) Wan Port: [%s]',
                shouldUpnpBeEnabled(state),
                wanport === '' ? 'Will Generate New WAN Port' : wanport
            );

            if (shouldUpnpBeEnabled(state)) {
                await dispatch(enableUpnp({ wanport, portssl }));
            } else {
                await dispatch(disableUpnp());
            }
        },
    });
Some files were not shown because too many files have changed in this diff.