mirror of
https://github.com/unraid/api.git
synced 2026-01-02 14:40:01 -06:00
Compare commits
164 Commits
v4.8.0
...
4.11.0-bui
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ff62fe3298 | ||
|
|
34320a2c7d | ||
|
|
23ef760d76 | ||
|
|
6ea94f061d | ||
|
|
782d5ebadc | ||
|
|
dfe363bc37 | ||
|
|
6b3b951d82 | ||
|
|
5449e30eed | ||
|
|
dc12656f81 | ||
|
|
f14b74af91 | ||
|
|
e2fa648d1c | ||
|
|
3b00fec5fd | ||
|
|
4ff6a1aaa0 | ||
|
|
86b6c4f85b | ||
|
|
45bd73698b | ||
|
|
fee7d4613e | ||
|
|
b6acf50c0d | ||
|
|
8279531f2b | ||
|
|
0a18b38008 | ||
|
|
23b2b88461 | ||
|
|
f5352e3a26 | ||
|
|
9dfdb8dce7 | ||
|
|
407585cd40 | ||
|
|
05056e7ca1 | ||
|
|
a74d935b56 | ||
|
|
2c62e0ad09 | ||
|
|
1a8da6d92b | ||
|
|
81808ada0f | ||
|
|
eecd9b1017 | ||
|
|
441e1805c1 | ||
|
|
29dcb7d0f0 | ||
|
|
1a7d35d3f6 | ||
|
|
af33e999a0 | ||
|
|
85a35804c1 | ||
|
|
a35c8ff2f1 | ||
|
|
153e7a1e3a | ||
|
|
e73fc356cb | ||
|
|
e1a7a3d22d | ||
|
|
53b05ebe5e | ||
|
|
2ed1308e40 | ||
|
|
6c03df2b97 | ||
|
|
074370c42c | ||
|
|
f34a33bc9f | ||
|
|
c7801a9236 | ||
|
|
dd759d9f0f | ||
|
|
74da8d81ef | ||
|
|
33e0b1ab24 | ||
|
|
ca4e2db1f2 | ||
|
|
ea20d1e211 | ||
|
|
79c57b8ed0 | ||
|
|
4168f43e3e | ||
|
|
20de3ec8d6 | ||
|
|
39b8f453da | ||
|
|
6bf3f77638 | ||
|
|
a79d049865 | ||
|
|
5b6bcb6043 | ||
|
|
6ee3cae962 | ||
|
|
f3671c3e07 | ||
|
|
862b54de8c | ||
|
|
9624ca5c39 | ||
|
|
22638811a9 | ||
|
|
5edfd823b8 | ||
|
|
0379845618 | ||
|
|
5a3e7a91eb | ||
|
|
aefcc762ca | ||
|
|
07b59b0970 | ||
|
|
91883e2b47 | ||
|
|
e57ecd551f | ||
|
|
45bb49bcd6 | ||
|
|
06578fcdf5 | ||
|
|
e791cc680d | ||
|
|
5ce5d19db0 | ||
|
|
c0ccdfa030 | ||
|
|
d613bfa041 | ||
|
|
453a5b2c95 | ||
|
|
8a82a3a1b7 | ||
|
|
9b85e009b8 | ||
|
|
a87d455bac | ||
|
|
412b32996d | ||
|
|
ba75a409a4 | ||
|
|
345e83bfb0 | ||
|
|
7be8bc84d3 | ||
|
|
4d97e1465b | ||
|
|
94420e4d45 | ||
|
|
711cc9ac92 | ||
|
|
0ec0de982f | ||
|
|
a2807864ac | ||
|
|
f88400eea8 | ||
|
|
0d443de20e | ||
|
|
27b33f0f95 | ||
|
|
13bd9bb567 | ||
|
|
f542c8e0bd | ||
|
|
038c582aed | ||
|
|
b9a1b9b087 | ||
|
|
d08fc94afb | ||
|
|
7c6f02a5cb | ||
|
|
ffb2ac51a4 | ||
|
|
719f460016 | ||
|
|
3cfe9fe9ee | ||
|
|
e539d7f603 | ||
|
|
a8566e9e5a | ||
|
|
f73e5e0058 | ||
|
|
64ccea2a81 | ||
|
|
86bea56272 | ||
|
|
af6e56de60 | ||
|
|
c4c99843c7 | ||
|
|
3122bdb953 | ||
|
|
22fe91cd56 | ||
|
|
b7c2407840 | ||
|
|
17b7428779 | ||
|
|
a7ef06ea25 | ||
|
|
5ba4479663 | ||
|
|
7bc583b186 | ||
|
|
b8035c207a | ||
|
|
1b279bbab3 | ||
|
|
68df344a4b | ||
|
|
53ca41404f | ||
|
|
9492c2ae6a | ||
|
|
3eb92dc9ea | ||
|
|
f12d231e63 | ||
|
|
75ad8381bd | ||
|
|
9901039a38 | ||
|
|
70c790ff89 | ||
|
|
0788756b91 | ||
|
|
d9ab58eb83 | ||
|
|
642a220c3a | ||
|
|
b6c4ee6eb4 | ||
|
|
8c8a5276b4 | ||
|
|
3dcbfbe489 | ||
|
|
184b76de1c | ||
|
|
c132f28281 | ||
|
|
5517e7506b | ||
|
|
d37dc3bce2 | ||
|
|
83076bb940 | ||
|
|
7b005cbbf6 | ||
|
|
b625227913 | ||
|
|
e54d27aede | ||
|
|
574d572d65 | ||
|
|
5355115af2 | ||
|
|
881f1e0960 | ||
|
|
7067e9e3dd | ||
|
|
f71943b62b | ||
|
|
9ce2fee380 | ||
|
|
7d88b3393c | ||
|
|
f6ec2839b5 | ||
|
|
02de89d130 | ||
|
|
eb080e5d22 | ||
|
|
edc0d1578b | ||
|
|
fcd6fbcdd4 | ||
|
|
fc68ea03d1 | ||
|
|
26ecf779e6 | ||
|
|
00da27d04f | ||
|
|
106ea09399 | ||
|
|
cb43f95233 | ||
|
|
d26c6b0760 | ||
|
|
2ade7eb527 | ||
|
|
e580f646a5 | ||
|
|
2704c0464c | ||
|
|
74a70b5557 | ||
|
|
469333acd4 | ||
|
|
8f70326d0f | ||
|
|
4f63b4cf3b | ||
|
|
a5f48da322 | ||
|
|
291ee475fb |
123
.claude/settings.json
Normal file
123
.claude/settings.json
Normal file
@@ -0,0 +1,123 @@
|
||||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"# Development Commands",
|
||||
"Bash(pnpm install)",
|
||||
"Bash(pnpm dev)",
|
||||
"Bash(pnpm build)",
|
||||
"Bash(pnpm test)",
|
||||
"Bash(pnpm test:*)",
|
||||
"Bash(pnpm lint)",
|
||||
"Bash(pnpm lint:fix)",
|
||||
"Bash(pnpm type-check)",
|
||||
"Bash(pnpm codegen)",
|
||||
"Bash(pnpm storybook)",
|
||||
"Bash(pnpm --filter * dev)",
|
||||
"Bash(pnpm --filter * build)",
|
||||
"Bash(pnpm --filter * test)",
|
||||
"Bash(pnpm --filter * lint)",
|
||||
"Bash(pnpm --filter * codegen)",
|
||||
|
||||
"# Git Commands (read-only)",
|
||||
"Bash(git status)",
|
||||
"Bash(git diff)",
|
||||
"Bash(git log)",
|
||||
"Bash(git branch)",
|
||||
"Bash(git remote -v)",
|
||||
|
||||
"# Search Commands",
|
||||
"Bash(rg *)",
|
||||
|
||||
"# File System (read-only)",
|
||||
"Bash(ls)",
|
||||
"Bash(ls -la)",
|
||||
"Bash(pwd)",
|
||||
"Bash(find . -name)",
|
||||
"Bash(find . -type)",
|
||||
|
||||
"# Node/NPM Commands",
|
||||
"Bash(node --version)",
|
||||
"Bash(pnpm --version)",
|
||||
"Bash(npx --version)",
|
||||
|
||||
"# Environment Commands",
|
||||
"Bash(echo $*)",
|
||||
"Bash(which *)",
|
||||
|
||||
"# Process Commands",
|
||||
"Bash(ps aux | grep)",
|
||||
"Bash(lsof -i)",
|
||||
|
||||
"# Documentation Domains",
|
||||
"WebFetch(domain:tailwindcss.com)",
|
||||
"WebFetch(domain:github.com)",
|
||||
"WebFetch(domain:reka-ui.com)",
|
||||
"WebFetch(domain:nodejs.org)",
|
||||
"WebFetch(domain:pnpm.io)",
|
||||
"WebFetch(domain:vitejs.dev)",
|
||||
"WebFetch(domain:nuxt.com)",
|
||||
"WebFetch(domain:nestjs.com)",
|
||||
|
||||
"# IDE Integration",
|
||||
"mcp__ide__getDiagnostics",
|
||||
|
||||
"# Browser MCP (for testing)",
|
||||
"mcp__browsermcp__browser_navigate",
|
||||
"mcp__browsermcp__browser_click",
|
||||
"mcp__browsermcp__browser_screenshot"
|
||||
],
|
||||
"deny": [
|
||||
"# Dangerous Commands",
|
||||
"Bash(rm -rf)",
|
||||
"Bash(chmod 777)",
|
||||
"Bash(curl)",
|
||||
"Bash(wget)",
|
||||
"Bash(ssh)",
|
||||
"Bash(scp)",
|
||||
"Bash(sudo)",
|
||||
"Bash(su)",
|
||||
"Bash(pkill)",
|
||||
"Bash(kill)",
|
||||
"Bash(killall)",
|
||||
"Bash(python)",
|
||||
"Bash(python3)",
|
||||
"Bash(pip)",
|
||||
"Bash(npm)",
|
||||
"Bash(yarn)",
|
||||
"Bash(apt)",
|
||||
"Bash(brew)",
|
||||
"Bash(systemctl)",
|
||||
"Bash(service)",
|
||||
"Bash(docker)",
|
||||
"Bash(docker-compose)",
|
||||
|
||||
"# File Modification (use Edit/Write tools instead)",
|
||||
"Bash(sed)",
|
||||
"Bash(awk)",
|
||||
"Bash(perl)",
|
||||
"Bash(echo > *)",
|
||||
"Bash(echo >> *)",
|
||||
"Bash(cat > *)",
|
||||
"Bash(cat >> *)",
|
||||
"Bash(tee)",
|
||||
|
||||
"# Git Write Commands (require explicit user action)",
|
||||
"Bash(git add)",
|
||||
"Bash(git commit)",
|
||||
"Bash(git push)",
|
||||
"Bash(git pull)",
|
||||
"Bash(git merge)",
|
||||
"Bash(git rebase)",
|
||||
"Bash(git checkout)",
|
||||
"Bash(git reset)",
|
||||
"Bash(git clean)",
|
||||
|
||||
"# Package Management Write Commands",
|
||||
"Bash(pnpm add)",
|
||||
"Bash(pnpm remove)",
|
||||
"Bash(pnpm update)",
|
||||
"Bash(pnpm upgrade)"
|
||||
]
|
||||
},
|
||||
"enableAllProjectMcpServers": false
|
||||
}
|
||||
@@ -8,5 +8,6 @@ alwaysApply: false
|
||||
* always run scripts from api/package.json unless requested
|
||||
* prefer adding new files to the nest repo located at api/src/unraid-api/ instead of the legacy code
|
||||
* Test suite is VITEST, do not use jest
|
||||
pnpm --filter ./api test
|
||||
* Prefer to not mock simple dependencies
|
||||
|
||||
|
||||
6
.cursor/rules/no-comments.mdc
Normal file
6
.cursor/rules/no-comments.mdc
Normal file
@@ -0,0 +1,6 @@
|
||||
---
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
---
|
||||
Never add comments for obvious things, and avoid commenting when starting and ending code blocks
|
||||
9
.cursor/rules/web-graphql.mdc
Normal file
9
.cursor/rules/web-graphql.mdc
Normal file
@@ -0,0 +1,9 @@
|
||||
---
|
||||
description:
|
||||
globs: web/**/*
|
||||
alwaysApply: false
|
||||
---
|
||||
* Always run `pnpm codegen` for GraphQL code generation in the web directory
|
||||
* GraphQL queries must be placed in `.query.ts` files
|
||||
* GraphQL mutations must be placed in `.mutation.ts` files
|
||||
* All GraphQL under `web/` and follow this naming convention
|
||||
20
.github/CODEOWNERS
vendored
20
.github/CODEOWNERS
vendored
@@ -1,20 +0,0 @@
|
||||
# Default owners for everything in the repo
|
||||
* @elibosley @pujitm @mdatelle @zackspear
|
||||
|
||||
# API specific files
|
||||
/api/ @elibosley @pujitm @mdatelle
|
||||
|
||||
# Web frontend files
|
||||
/web/ @elibosley @mdatelle @zackspear
|
||||
|
||||
# Plugin related files
|
||||
/plugin/ @elibosley
|
||||
|
||||
# Unraid UI specific files
|
||||
/unraid-ui/ @mdatelle @zackspear @pujitm
|
||||
|
||||
# GitHub workflows and configuration
|
||||
/.github/ @elibosley
|
||||
|
||||
# Documentation
|
||||
*.md @elibosley @pujitm @mdatelle @zackspear
|
||||
49
.github/codeql/README.md
vendored
Normal file
49
.github/codeql/README.md
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
# CodeQL Security Analysis for Unraid API
|
||||
|
||||
This directory contains custom CodeQL queries and configurations for security analysis of the Unraid API codebase.
|
||||
|
||||
## Overview
|
||||
|
||||
The analysis is configured to run:
|
||||
- On all pushes to the main branch
|
||||
- On all pull requests
|
||||
- Weekly via scheduled runs
|
||||
|
||||
## Custom Queries
|
||||
|
||||
The following custom queries are implemented:
|
||||
|
||||
1. **API Authorization Bypass Detection**
|
||||
Identifies API handlers that may not properly check authorization before performing operations.
|
||||
|
||||
2. **GraphQL Injection Detection**
|
||||
Detects potential injection vulnerabilities in GraphQL queries and operations.
|
||||
|
||||
3. **Hardcoded Secrets Detection**
|
||||
Finds potential hardcoded secrets or credentials in the codebase.
|
||||
|
||||
4. **Insecure Cryptographic Implementations**
|
||||
Identifies usage of weak cryptographic algorithms or insecure random number generation.
|
||||
|
||||
5. **Path Traversal Vulnerability Detection**
|
||||
Detects potential path traversal vulnerabilities in file system operations.
|
||||
|
||||
## Configuration
|
||||
|
||||
The CodeQL analysis is configured in:
|
||||
- `.github/workflows/codeql-analysis.yml` - Workflow configuration
|
||||
- `.github/codeql/codeql-config.yml` - CodeQL engine configuration
|
||||
|
||||
## Running Locally
|
||||
|
||||
To run these queries locally:
|
||||
|
||||
1. Install the CodeQL CLI: https://github.com/github/codeql-cli-binaries/releases
|
||||
2. Create a CodeQL database:
|
||||
```
|
||||
codeql database create <db-name> --language=javascript --source-root=.
|
||||
```
|
||||
3. Run a query:
|
||||
```
|
||||
codeql query run .github/codeql/custom-queries/javascript/api-auth-bypass.ql --database=<db-name>
|
||||
```
|
||||
16
.github/codeql/codeql-config.yml
vendored
Normal file
16
.github/codeql/codeql-config.yml
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
name: "Unraid API CodeQL Configuration"
|
||||
|
||||
disable-default-queries: false
|
||||
|
||||
queries:
|
||||
- name: Extended Security Queries
|
||||
uses: security-extended
|
||||
- name: Custom Unraid API Queries
|
||||
uses: ./.github/codeql/custom-queries
|
||||
|
||||
query-filters:
|
||||
- exclude:
|
||||
problem.severity:
|
||||
- warning
|
||||
- recommendation
|
||||
tags contain: security
|
||||
45
.github/codeql/custom-queries/javascript/api-auth-bypass.ql
vendored
Normal file
45
.github/codeql/custom-queries/javascript/api-auth-bypass.ql
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
/**
|
||||
* @name Potential API Authorization Bypass
|
||||
* @description Functions that process API requests without verifying authorization may lead to security vulnerabilities.
|
||||
* @kind problem
|
||||
* @problem.severity error
|
||||
* @precision medium
|
||||
* @id js/api-auth-bypass
|
||||
* @tags security
|
||||
* external/cwe/cwe-285
|
||||
*/
|
||||
|
||||
import javascript
|
||||
|
||||
/**
|
||||
* Identifies functions that appear to handle API requests
|
||||
*/
|
||||
predicate isApiHandler(Function f) {
|
||||
exists(f.getAParameter()) and
|
||||
(
|
||||
f.getName().regexpMatch("(?i).*(api|handler|controller|resolver|endpoint).*") or
|
||||
exists(CallExpr call |
|
||||
call.getCalleeName().regexpMatch("(?i).*(get|post|put|delete|patch).*") and
|
||||
call.getArgument(1) = f
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Identifies expressions that appear to perform authorization checks
|
||||
*/
|
||||
predicate isAuthCheck(DataFlow::Node node) {
|
||||
exists(CallExpr call |
|
||||
call.getCalleeName().regexpMatch("(?i).*(authorize|authenticate|isAuth|checkAuth|verifyAuth|hasPermission|isAdmin|canAccess).*") and
|
||||
call.flow().getASuccessor*() = node
|
||||
)
|
||||
}
|
||||
|
||||
from Function apiHandler
|
||||
where
|
||||
isApiHandler(apiHandler) and
|
||||
not exists(DataFlow::Node authCheck |
|
||||
isAuthCheck(authCheck) and
|
||||
authCheck.getEnclosingExpr().getEnclosingFunction() = apiHandler
|
||||
)
|
||||
select apiHandler, "API handler function may not perform proper authorization checks."
|
||||
77
.github/codeql/custom-queries/javascript/graphql-injection.ql
vendored
Normal file
77
.github/codeql/custom-queries/javascript/graphql-injection.ql
vendored
Normal file
@@ -0,0 +1,77 @@
|
||||
/**
|
||||
* @name Potential GraphQL Injection
|
||||
* @description User-controlled input used directly in GraphQL queries may lead to injection vulnerabilities.
|
||||
* @kind path-problem
|
||||
* @problem.severity error
|
||||
* @precision high
|
||||
* @id js/graphql-injection
|
||||
* @tags security
|
||||
* external/cwe/cwe-943
|
||||
*/
|
||||
|
||||
import javascript
|
||||
import DataFlow::PathGraph
|
||||
|
||||
class GraphQLQueryExecution extends DataFlow::CallNode {
|
||||
GraphQLQueryExecution() {
|
||||
exists(string name |
|
||||
name = this.getCalleeName() and
|
||||
(
|
||||
name = "execute" or
|
||||
name = "executeQuery" or
|
||||
name = "query" or
|
||||
name.regexpMatch("(?i).*graphql.*query.*")
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
DataFlow::Node getQuery() {
|
||||
result = this.getArgument(0)
|
||||
}
|
||||
}
|
||||
|
||||
class UserControlledInput extends DataFlow::Node {
|
||||
UserControlledInput() {
|
||||
exists(DataFlow::ParameterNode param |
|
||||
param.getName().regexpMatch("(?i).*(query|request|input|args|variables|params).*") and
|
||||
this = param
|
||||
)
|
||||
or
|
||||
exists(DataFlow::PropRead prop |
|
||||
prop.getPropertyName().regexpMatch("(?i).*(query|request|input|args|variables|params).*") and
|
||||
this = prop
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `node` is a string concatenation.
|
||||
*/
|
||||
predicate isStringConcatenation(DataFlow::Node node) {
|
||||
exists(BinaryExpr concat |
|
||||
concat.getOperator() = "+" and
|
||||
concat.flow() = node
|
||||
)
|
||||
}
|
||||
|
||||
class GraphQLInjectionConfig extends TaintTracking::Configuration {
|
||||
GraphQLInjectionConfig() { this = "GraphQLInjectionConfig" }
|
||||
|
||||
override predicate isSource(DataFlow::Node source) {
|
||||
source instanceof UserControlledInput
|
||||
}
|
||||
|
||||
override predicate isSink(DataFlow::Node sink) {
|
||||
exists(GraphQLQueryExecution exec | sink = exec.getQuery())
|
||||
}
|
||||
|
||||
override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
|
||||
// Add any GraphQL-specific taint steps if needed
|
||||
isStringConcatenation(succ) and
|
||||
succ.(DataFlow::BinaryExprNode).getAnOperand() = pred
|
||||
}
|
||||
}
|
||||
|
||||
from GraphQLInjectionConfig config, DataFlow::PathNode source, DataFlow::PathNode sink
|
||||
where config.hasFlowPath(source, sink)
|
||||
select sink.getNode(), source, sink, "GraphQL query may contain user-controlled input from $@.", source.getNode(), "user input"
|
||||
53
.github/codeql/custom-queries/javascript/hardcoded-secrets.ql
vendored
Normal file
53
.github/codeql/custom-queries/javascript/hardcoded-secrets.ql
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
/**
|
||||
* @name Hardcoded Secrets
|
||||
* @description Hardcoded secrets or credentials in source code can lead to security vulnerabilities.
|
||||
* @kind problem
|
||||
* @problem.severity error
|
||||
* @precision medium
|
||||
* @id js/hardcoded-secrets
|
||||
* @tags security
|
||||
* external/cwe/cwe-798
|
||||
*/
|
||||
|
||||
import javascript
|
||||
|
||||
/**
|
||||
* Identifies variable declarations or assignments that may contain secrets
|
||||
*/
|
||||
predicate isSensitiveAssignment(DataFlow::Node node) {
|
||||
exists(DataFlow::PropWrite propWrite |
|
||||
propWrite.getPropertyName().regexpMatch("(?i).*(secret|key|password|token|credential|auth).*") and
|
||||
propWrite.getRhs() = node
|
||||
)
|
||||
or
|
||||
exists(VariableDeclarator decl |
|
||||
decl.getName().regexpMatch("(?i).*(secret|key|password|token|credential|auth).*") and
|
||||
decl.getInit().flow() = node
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Identifies literals that look like secrets
|
||||
*/
|
||||
predicate isSecretLiteral(StringLiteral literal) {
|
||||
// Match alphanumeric strings of moderate length that may be secrets
|
||||
literal.getValue().regexpMatch("[A-Za-z0-9_\\-]{8,}") and
|
||||
|
||||
not (
|
||||
// Skip likely non-sensitive literals
|
||||
literal.getValue().regexpMatch("(?i)^(true|false|null|undefined|localhost|development|production|staging)$") or
|
||||
// Skip URLs without credentials
|
||||
literal.getValue().regexpMatch("^https?://[^:@/]+")
|
||||
)
|
||||
}
|
||||
|
||||
from DataFlow::Node source
|
||||
where
|
||||
isSensitiveAssignment(source) and
|
||||
(
|
||||
exists(StringLiteral literal |
|
||||
literal.flow() = source and
|
||||
isSecretLiteral(literal)
|
||||
)
|
||||
)
|
||||
select source, "This assignment may contain a hardcoded secret or credential."
|
||||
90
.github/codeql/custom-queries/javascript/insecure-crypto.ql
vendored
Normal file
90
.github/codeql/custom-queries/javascript/insecure-crypto.ql
vendored
Normal file
@@ -0,0 +1,90 @@
|
||||
/**
|
||||
* @name Insecure Cryptographic Implementation
|
||||
* @description Usage of weak cryptographic algorithms or improper implementations can lead to security vulnerabilities.
|
||||
* @kind problem
|
||||
* @problem.severity error
|
||||
* @precision high
|
||||
* @id js/insecure-crypto
|
||||
* @tags security
|
||||
* external/cwe/cwe-327
|
||||
*/
|
||||
|
||||
import javascript
|
||||
|
||||
/**
|
||||
* Identifies calls to crypto functions with insecure algorithms
|
||||
*/
|
||||
predicate isInsecureCryptoCall(CallExpr call) {
|
||||
// Node.js crypto module uses
|
||||
exists(string methodName |
|
||||
methodName = call.getCalleeName() and
|
||||
(
|
||||
// Detect MD5 usage
|
||||
methodName.regexpMatch("(?i).*md5.*") or
|
||||
methodName.regexpMatch("(?i).*sha1.*") or
|
||||
|
||||
// Insecure crypto constructors
|
||||
(
|
||||
methodName = "createHash" or
|
||||
methodName = "createCipheriv" or
|
||||
methodName = "createDecipher"
|
||||
) and
|
||||
(
|
||||
exists(StringLiteral algo |
|
||||
algo = call.getArgument(0) and
|
||||
(
|
||||
algo.getValue().regexpMatch("(?i).*(md5|md4|md2|sha1|des|rc4|blowfish).*") or
|
||||
algo.getValue().regexpMatch("(?i).*(ecb).*") // ECB mode
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
or
|
||||
// Browser crypto API uses
|
||||
exists(MethodCallExpr mce, string propertyName |
|
||||
propertyName = mce.getMethodName() and
|
||||
(
|
||||
propertyName = "subtle" and
|
||||
exists(MethodCallExpr subtleCall |
|
||||
subtleCall.getReceiver() = mce and
|
||||
subtleCall.getMethodName() = "encrypt" and
|
||||
exists(ObjectExpr obj |
|
||||
obj = subtleCall.getArgument(0) and
|
||||
exists(Property p |
|
||||
p = obj.getAProperty() and
|
||||
p.getName() = "name" and
|
||||
exists(StringLiteral algo |
|
||||
algo = p.getInit() and
|
||||
algo.getValue().regexpMatch("(?i).*(rc4|des|aes-cbc).*")
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Identifies usage of Math.random() for security-sensitive operations
|
||||
*/
|
||||
predicate isInsecureRandomCall(CallExpr call) {
|
||||
exists(PropertyAccess prop |
|
||||
prop.getPropertyName() = "random" and
|
||||
prop.getBase().toString() = "Math" and
|
||||
call.getCallee() = prop
|
||||
)
|
||||
}
|
||||
|
||||
from Expr insecureExpr, string message
|
||||
where
|
||||
(
|
||||
insecureExpr instanceof CallExpr and
|
||||
isInsecureCryptoCall(insecureExpr) and
|
||||
message = "Using potentially insecure cryptographic algorithm or mode."
|
||||
) or (
|
||||
insecureExpr instanceof CallExpr and
|
||||
isInsecureRandomCall(insecureExpr) and
|
||||
message = "Using Math.random() for security-sensitive operation. Consider using crypto.getRandomValues() instead."
|
||||
)
|
||||
select insecureExpr, message
|
||||
130
.github/codeql/custom-queries/javascript/path-traversal.ql
vendored
Normal file
130
.github/codeql/custom-queries/javascript/path-traversal.ql
vendored
Normal file
@@ -0,0 +1,130 @@
|
||||
/**
|
||||
* @name Path Traversal Vulnerability
|
||||
* @description User-controlled inputs used in file operations may allow for path traversal attacks.
|
||||
* @kind path-problem
|
||||
* @problem.severity error
|
||||
* @precision high
|
||||
* @id js/path-traversal
|
||||
* @tags security
|
||||
* external/cwe/cwe-22
|
||||
*/
|
||||
|
||||
import javascript
|
||||
import DataFlow::PathGraph
|
||||
|
||||
/**
|
||||
* Identifies sources of user-controlled input
|
||||
*/
|
||||
class UserInput extends DataFlow::Node {
|
||||
UserInput() {
|
||||
// HTTP request parameters
|
||||
exists(DataFlow::ParameterNode param |
|
||||
param.getName().regexpMatch("(?i).*(req|request|param|query|body|user|input).*") and
|
||||
this = param
|
||||
)
|
||||
or
|
||||
// Access to common request properties
|
||||
exists(DataFlow::PropRead prop |
|
||||
(
|
||||
prop.getPropertyName() = "query" or
|
||||
prop.getPropertyName() = "body" or
|
||||
prop.getPropertyName() = "params" or
|
||||
prop.getPropertyName() = "files"
|
||||
) and
|
||||
this = prop
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Identifies fs module imports
|
||||
*/
|
||||
class FileSystemAccess extends DataFlow::CallNode {
|
||||
FileSystemAccess() {
|
||||
// Node.js fs module functions
|
||||
exists(string name |
|
||||
name = this.getCalleeName() and
|
||||
(
|
||||
name = "readFile" or
|
||||
name = "readFileSync" or
|
||||
name = "writeFile" or
|
||||
name = "writeFileSync" or
|
||||
name = "appendFile" or
|
||||
name = "appendFileSync" or
|
||||
name = "createReadStream" or
|
||||
name = "createWriteStream" or
|
||||
name = "openSync" or
|
||||
name = "open"
|
||||
)
|
||||
)
|
||||
or
|
||||
// File system operations via require('fs')
|
||||
exists(DataFlow::SourceNode fsModule, string methodName |
|
||||
(fsModule.getAPropertyRead("promises") or fsModule).flowsTo(this.getReceiver()) and
|
||||
methodName = this.getMethodName() and
|
||||
(
|
||||
methodName = "readFile" or
|
||||
methodName = "writeFile" or
|
||||
methodName = "appendFile" or
|
||||
methodName = "readdir" or
|
||||
methodName = "stat"
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
DataFlow::Node getPathArgument() {
|
||||
result = this.getArgument(0)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Identifies sanitization of file paths
|
||||
*/
|
||||
predicate isPathSanitized(DataFlow::Node node) {
|
||||
// Check for path normalization or validation
|
||||
exists(DataFlow::CallNode call |
|
||||
(
|
||||
call.getCalleeName() = "resolve" or
|
||||
call.getCalleeName() = "normalize" or
|
||||
call.getCalleeName() = "isAbsolute" or
|
||||
call.getCalleeName() = "relative" or
|
||||
call.getCalleeName().regexpMatch("(?i).*(sanitize|validate|check).*path.*")
|
||||
) and
|
||||
call.flowsTo(node)
|
||||
)
|
||||
or
|
||||
// Check for path traversal mitigation patterns
|
||||
exists(DataFlow::CallNode call |
|
||||
call.getCalleeName() = "replace" and
|
||||
exists(StringLiteral regex |
|
||||
regex = call.getArgument(0).(DataFlow::RegExpCreationNode).getSource().getAChildExpr() and
|
||||
regex.getValue().regexpMatch("(\\.\\./|\\.\\.\\\\)")
|
||||
) and
|
||||
call.flowsTo(node)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Configuration for tracking flow from user input to file system operations
|
||||
*/
|
||||
class PathTraversalConfig extends TaintTracking::Configuration {
|
||||
PathTraversalConfig() { this = "PathTraversalConfig" }
|
||||
|
||||
override predicate isSource(DataFlow::Node source) {
|
||||
source instanceof UserInput
|
||||
}
|
||||
|
||||
override predicate isSink(DataFlow::Node sink) {
|
||||
exists(FileSystemAccess fileAccess |
|
||||
sink = fileAccess.getPathArgument()
|
||||
)
|
||||
}
|
||||
|
||||
override predicate isSanitizer(DataFlow::Node node) {
|
||||
isPathSanitized(node)
|
||||
}
|
||||
}
|
||||
|
||||
from PathTraversalConfig config, DataFlow::PathNode source, DataFlow::PathNode sink
|
||||
where config.hasFlowPath(source, sink)
|
||||
select sink.getNode(), source, sink, "File system operation depends on a user-controlled value $@.", source.getNode(), "user input"
|
||||
19
.github/workflows/build-plugin.yml
vendored
19
.github/workflows/build-plugin.yml
vendored
@@ -23,6 +23,10 @@ on:
|
||||
type: string
|
||||
required: true
|
||||
description: "Base URL for the plugin builds"
|
||||
BUILD_NUMBER:
|
||||
type: string
|
||||
required: true
|
||||
description: "Build number for the plugin builds"
|
||||
secrets:
|
||||
CF_ACCESS_KEY_ID:
|
||||
required: true
|
||||
@@ -100,11 +104,6 @@ jobs:
|
||||
with:
|
||||
name: unraid-api
|
||||
path: ${{ github.workspace }}/plugin/api/
|
||||
- name: Download PNPM Store
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: packed-node-modules
|
||||
path: ${{ github.workspace }}/plugin/
|
||||
- name: Extract Unraid API
|
||||
run: |
|
||||
mkdir -p ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/unraid-api
|
||||
@@ -113,9 +112,8 @@ jobs:
|
||||
id: build-plugin
|
||||
run: |
|
||||
cd ${{ github.workspace }}/plugin
|
||||
ls -al
|
||||
pnpm run build:txz
|
||||
pnpm run build:plugin --tag="${{ inputs.TAG }}" --base-url="${{ inputs.BASE_URL }}"
|
||||
pnpm run build:txz --tag="${{ inputs.TAG }}" --base-url="${{ inputs.BASE_URL }}" --api-version="${{ steps.vars.outputs.API_VERSION }}" --build-number="${{ inputs.BUILD_NUMBER }}"
|
||||
pnpm run build:plugin --tag="${{ inputs.TAG }}" --base-url="${{ inputs.BASE_URL }}" --api-version="${{ steps.vars.outputs.API_VERSION }}" --build-number="${{ inputs.BUILD_NUMBER }}"
|
||||
|
||||
- name: Ensure Plugin Files Exist
|
||||
run: |
|
||||
@@ -130,11 +128,6 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -f ./deploy/*.tar.xz ]; then
|
||||
echo "Error: .tar.xz file not found in plugin/deploy/"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Upload to GHA
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
|
||||
78
.github/workflows/claude-code-review.yml
vendored
Normal file
78
.github/workflows/claude-code-review.yml
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
name: Claude Code Review
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
# Optional: Only run on specific file changes
|
||||
# paths:
|
||||
# - "src/**/*.ts"
|
||||
# - "src/**/*.tsx"
|
||||
# - "src/**/*.js"
|
||||
# - "src/**/*.jsx"
|
||||
|
||||
jobs:
|
||||
claude-review:
|
||||
# Optional: Filter by PR author
|
||||
# if: |
|
||||
# github.event.pull_request.user.login == 'external-contributor' ||
|
||||
# github.event.pull_request.user.login == 'new-developer' ||
|
||||
# github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR'
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
issues: read
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Run Claude Code Review
|
||||
id: claude-review
|
||||
uses: anthropics/claude-code-action@beta
|
||||
with:
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
|
||||
# Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4)
|
||||
# model: "claude-opus-4-20250514"
|
||||
|
||||
# Direct prompt for automated review (no @claude mention needed)
|
||||
direct_prompt: |
|
||||
Please review this pull request and provide feedback on:
|
||||
- Code quality and best practices
|
||||
- Potential bugs or issues
|
||||
- Performance considerations
|
||||
- Security concerns
|
||||
- Test coverage
|
||||
|
||||
Be constructive and helpful in your feedback.
|
||||
|
||||
# Optional: Use sticky comments to make Claude reuse the same comment on subsequent pushes to the same PR
|
||||
# use_sticky_comment: true
|
||||
|
||||
# Optional: Customize review based on file types
|
||||
# direct_prompt: |
|
||||
# Review this PR focusing on:
|
||||
# - For TypeScript files: Type safety and proper interface usage
|
||||
# - For API endpoints: Security, input validation, and error handling
|
||||
# - For React components: Performance, accessibility, and best practices
|
||||
# - For tests: Coverage, edge cases, and test quality
|
||||
|
||||
# Optional: Different prompts for different authors
|
||||
# direct_prompt: |
|
||||
# ${{ github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' &&
|
||||
# 'Welcome! Please review this PR from a first-time contributor. Be encouraging and provide detailed explanations for any suggestions.' ||
|
||||
# 'Please provide a thorough code review focusing on our coding standards and best practices.' }}
|
||||
|
||||
# Optional: Add specific tools for running tests or linting
|
||||
# allowed_tools: "Bash(npm run test),Bash(npm run lint),Bash(npm run typecheck)"
|
||||
|
||||
# Optional: Skip review for certain conditions
|
||||
# if: |
|
||||
# !contains(github.event.pull_request.title, '[skip-review]') &&
|
||||
# !contains(github.event.pull_request.title, '[WIP]')
|
||||
|
||||
64
.github/workflows/claude.yml
vendored
Normal file
64
.github/workflows/claude.yml
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
name: Claude Code
|
||||
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
pull_request_review_comment:
|
||||
types: [created]
|
||||
issues:
|
||||
types: [opened, assigned]
|
||||
pull_request_review:
|
||||
types: [submitted]
|
||||
|
||||
jobs:
|
||||
claude:
|
||||
if: |
|
||||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
|
||||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
issues: read
|
||||
id-token: write
|
||||
actions: read # Required for Claude to read CI results on PRs
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Run Claude Code
|
||||
id: claude
|
||||
uses: anthropics/claude-code-action@beta
|
||||
with:
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
|
||||
# This is an optional setting that allows Claude to read CI results on PRs
|
||||
additional_permissions: |
|
||||
actions: read
|
||||
|
||||
# Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4)
|
||||
# model: "claude-opus-4-20250514"
|
||||
|
||||
# Optional: Customize the trigger phrase (default: @claude)
|
||||
# trigger_phrase: "/claude"
|
||||
|
||||
# Optional: Trigger when specific user is assigned to an issue
|
||||
# assignee_trigger: "claude-bot"
|
||||
|
||||
# Optional: Allow Claude to run specific commands
|
||||
# allowed_tools: "Bash(npm install),Bash(npm run build),Bash(npm run test:*),Bash(npm run lint:*)"
|
||||
|
||||
# Optional: Add custom instructions for Claude to customize its behavior for your project
|
||||
# custom_instructions: |
|
||||
# Follow our coding standards
|
||||
# Ensure all new code has tests
|
||||
# Use TypeScript for new files
|
||||
|
||||
# Optional: Custom environment variables for Claude
|
||||
# claude_env: |
|
||||
# NODE_ENV: test
|
||||
|
||||
40
.github/workflows/codeql-analysis.yml
vendored
Normal file
40
.github/workflows/codeql-analysis.yml
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
name: "CodeQL Security Analysis"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ main ]
|
||||
pull_request:
|
||||
branches: [ main ]
|
||||
schedule:
|
||||
- cron: '0 0 * * 0' # Run weekly on Sundays
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'javascript', 'typescript' ]
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/codeql-config.yml
|
||||
queries: +security-and-quality
|
||||
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v3
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
76
.github/workflows/deploy-storybook.yml
vendored
Normal file
76
.github/workflows/deploy-storybook.yml
vendored
Normal file
@@ -0,0 +1,76 @@
|
||||
name: Deploy Storybook to Cloudflare Workers
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
issues: write
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- 'unraid-ui/**'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'unraid-ui/**'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
name: Deploy Storybook
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '22.17.1'
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.1
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
|
||||
version: 1.0
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build Storybook
|
||||
run: |
|
||||
cd unraid-ui
|
||||
pnpm build-storybook
|
||||
|
||||
- name: Deploy to Cloudflare Workers (Staging)
|
||||
id: deploy_staging
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: cloudflare/wrangler-action@v3
|
||||
with:
|
||||
apiToken: ${{ secrets.CLOUDFLARE_DEPLOY_TOKEN }}
|
||||
command: deploy --env staging
|
||||
workingDirectory: unraid-ui
|
||||
|
||||
- name: Deploy to Cloudflare Workers (Production)
|
||||
if: github.ref == 'refs/heads/main' && github.event_name == 'push'
|
||||
uses: cloudflare/wrangler-action@v3
|
||||
with:
|
||||
apiToken: ${{ secrets.CLOUDFLARE_DEPLOY_TOKEN }}
|
||||
command: deploy
|
||||
workingDirectory: unraid-ui
|
||||
|
||||
- name: Comment PR with deployment URL
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
github.rest.issues.createComment({
|
||||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
body: `🚀 Storybook has been deployed to staging: ${{ steps.deploy_staging.outputs['deployment-url'] }}`
|
||||
})
|
||||
77
.github/workflows/main.yml
vendored
77
.github/workflows/main.yml
vendored
@@ -45,7 +45,7 @@ jobs:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.4.3
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.1
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
|
||||
version: 1.0
|
||||
@@ -72,6 +72,12 @@ jobs:
|
||||
- name: PNPM Install
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Lint
|
||||
run: pnpm run lint
|
||||
|
||||
- name: Type Check
|
||||
run: pnpm run type-check
|
||||
|
||||
- name: Setup libvirt
|
||||
run: |
|
||||
# Create required groups (if they don't already exist)
|
||||
@@ -94,7 +100,7 @@ jobs:
|
||||
auth_unix_rw = "none"
|
||||
EOF
|
||||
|
||||
# Add the current user to libvirt and kvm groups (note: this change won’t apply to the current session)
|
||||
# Add the current user to libvirt and kvm groups (note: this change won't apply to the current session)
|
||||
sudo usermod -aG libvirt,kvm $USER
|
||||
|
||||
sudo mkdir -p /var/run/libvirt
|
||||
@@ -111,15 +117,47 @@ jobs:
|
||||
# Verify libvirt is running using sudo to bypass group membership delays
|
||||
sudo virsh list --all || true
|
||||
|
||||
- name: Lint
|
||||
run: pnpm run lint
|
||||
- uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: latest
|
||||
|
||||
- name: Test
|
||||
run: pnpm run coverage
|
||||
- name: Run Tests Concurrently
|
||||
run: |
|
||||
set -e
|
||||
|
||||
# Run all tests in parallel with labeled output
|
||||
echo "🚀 Starting API coverage tests..."
|
||||
pnpm run coverage > api-test.log 2>&1 &
|
||||
API_PID=$!
|
||||
|
||||
echo "🚀 Starting Connect plugin tests..."
|
||||
(cd ../packages/unraid-api-plugin-connect && pnpm test) > connect-test.log 2>&1 &
|
||||
CONNECT_PID=$!
|
||||
|
||||
echo "🚀 Starting Shared package tests..."
|
||||
(cd ../packages/unraid-shared && pnpm test) > shared-test.log 2>&1 &
|
||||
SHARED_PID=$!
|
||||
|
||||
# Wait for all processes and capture exit codes
|
||||
wait $API_PID && echo "✅ API tests completed" || { echo "❌ API tests failed"; API_EXIT=1; }
|
||||
wait $CONNECT_PID && echo "✅ Connect tests completed" || { echo "❌ Connect tests failed"; CONNECT_EXIT=1; }
|
||||
wait $SHARED_PID && echo "✅ Shared tests completed" || { echo "❌ Shared tests failed"; SHARED_EXIT=1; }
|
||||
|
||||
# Display all outputs
|
||||
echo "📋 API Test Results:" && cat api-test.log
|
||||
echo "📋 Connect Plugin Test Results:" && cat connect-test.log
|
||||
echo "📋 Shared Package Test Results:" && cat shared-test.log
|
||||
|
||||
# Exit with error if any test failed
|
||||
if [[ ${API_EXIT:-0} -eq 1 || ${CONNECT_EXIT:-0} -eq 1 || ${SHARED_EXIT:-0} -eq 1 ]]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
build-api:
|
||||
name: Build API
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
build_number: ${{ steps.buildnumber.outputs.build_number }}
|
||||
defaults:
|
||||
run:
|
||||
working-directory: api
|
||||
@@ -152,7 +190,7 @@ jobs:
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.4.3
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.1
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential
|
||||
version: 1.0
|
||||
@@ -162,12 +200,6 @@ jobs:
|
||||
cd ${{ github.workspace }}
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Lint
|
||||
run: pnpm run lint
|
||||
|
||||
- name: Type Check
|
||||
run: pnpm run type-check
|
||||
|
||||
- name: Build
|
||||
run: pnpm run build
|
||||
|
||||
@@ -179,11 +211,19 @@ jobs:
|
||||
PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
|
||||
API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
|
||||
export API_VERSION
|
||||
echo "API_VERSION=${API_VERSION}" >> $GITHUB_ENV
|
||||
echo "PACKAGE_LOCK_VERSION=${PACKAGE_LOCK_VERSION}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Generate build number
|
||||
id: buildnumber
|
||||
uses: onyxmueller/build-tag-number@v1
|
||||
with:
|
||||
token: ${{secrets.github_token}}
|
||||
prefix: ${{steps.vars.outputs.PACKAGE_LOCK_VERSION}}
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
pnpm run build:release
|
||||
|
||||
tar -czf deploy/unraid-api.tgz -C deploy/pack/ .
|
||||
|
||||
- name: Upload tgz to Github artifacts
|
||||
@@ -191,11 +231,6 @@ jobs:
|
||||
with:
|
||||
name: unraid-api
|
||||
path: ${{ github.workspace }}/api/deploy/unraid-api.tgz
|
||||
- name: Upload Node Modules to Github artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: packed-node-modules
|
||||
path: ${{ github.workspace }}/api/deploy/packed-node-modules.tar.xz
|
||||
|
||||
build-unraid-ui-webcomponents:
|
||||
name: Build Unraid UI Library (Webcomponent Version)
|
||||
@@ -232,7 +267,7 @@ jobs:
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.4.3
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.1
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential
|
||||
version: 1.0
|
||||
@@ -340,6 +375,7 @@ jobs:
|
||||
TAG: ${{ github.event.pull_request.number && format('PR{0}', github.event.pull_request.number) || '' }}
|
||||
BUCKET_PATH: ${{ github.event.pull_request.number && format('unraid-api/tag/PR{0}', github.event.pull_request.number) || 'unraid-api' }}
|
||||
BASE_URL: "https://preview.dl.unraid.net/unraid-api"
|
||||
BUILD_NUMBER: ${{ needs.build-api.outputs.build_number }}
|
||||
secrets:
|
||||
CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
|
||||
CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
@@ -362,6 +398,7 @@ jobs:
|
||||
TAG: ""
|
||||
BUCKET_PATH: unraid-api
|
||||
BASE_URL: "https://stable.dl.unraid.net/unraid-api"
|
||||
BUILD_NUMBER: ${{ needs.build-api.outputs.build_number }}
|
||||
secrets:
|
||||
CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
|
||||
CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
|
||||
49
.github/workflows/push-staging-pr-on-close.yml
vendored
49
.github/workflows/push-staging-pr-on-close.yml
vendored
@@ -4,43 +4,68 @@ on:
|
||||
pull_request:
|
||||
types:
|
||||
- closed
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
pr_number:
|
||||
description: "PR number to test with"
|
||||
required: true
|
||||
type: string
|
||||
pr_merged:
|
||||
description: "Simulate merged PR"
|
||||
required: true
|
||||
type: boolean
|
||||
default: true
|
||||
|
||||
jobs:
|
||||
push-staging:
|
||||
if: github.event.pull_request.merged == true
|
||||
if: (github.event_name == 'pull_request' && github.event.pull_request.merged == true) || (github.event_name == 'workflow_dispatch' && inputs.pr_merged == true)
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
steps:
|
||||
- name: Set Timezone
|
||||
uses: szenius/set-timezone@v2.0
|
||||
with:
|
||||
timezoneLinux: "America/Los_Angeles"
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: refs/pull/${{ github.event.pull_request.base.ref }}/merge
|
||||
|
||||
- name: Set PR number
|
||||
id: pr_number
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" == "pull_request" ]; then
|
||||
echo "pr_number=${{ github.event.pull_request.number }}" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "pr_number=${{ inputs.pr_number }}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Download artifact
|
||||
uses: actions/download-artifact@v4
|
||||
uses: dawidd6/action-download-artifact@v11
|
||||
with:
|
||||
name: connect-files
|
||||
name_is_regexp: true
|
||||
name: unraid-plugin-.*
|
||||
path: connect-files
|
||||
pr: ${{ steps.pr_number.outputs.pr_number }}
|
||||
workflow_conclusion: success
|
||||
workflow_search: true
|
||||
search_artifacts: true
|
||||
|
||||
- name: Update Downloaded Staging Plugin to New Date
|
||||
run: |
|
||||
if [ ! -f "connect-files/plugins/dynamix.unraid.net.pr.plg" ]; then
|
||||
echo "ERROR: dynamix.unraid.net.pr.plg not found"
|
||||
# Find the .plg file in the downloaded artifact
|
||||
plgfile=$(find connect-files -name "*.plg" -type f | head -1)
|
||||
if [ ! -f "$plgfile" ]; then
|
||||
echo "ERROR: .plg file not found in connect-files/"
|
||||
ls -la connect-files/
|
||||
exit 1
|
||||
fi
|
||||
|
||||
plgfile="connect-files/plugins/dynamix.unraid.net.pr.plg"
|
||||
echo "Found plugin file: $plgfile"
|
||||
version=$(date +"%Y.%m.%d.%H%M")
|
||||
sed -i -E "s#(<!ENTITY version \").*(\">)#\1${version}\2#g" "${plgfile}" || exit 1
|
||||
|
||||
# Change the plugin url to point to staging
|
||||
url="https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg"
|
||||
sed -i -E "s#(<!ENTITY pluginURL \").*(\">)#\1${url}\2#g" "${plgfile}" || exit 1
|
||||
sed -i -E "s#(<!ENTITY plugin_url \").*?(\">)#\1${url}\2#g" "${plgfile}" || exit 1
|
||||
cat "${plgfile}"
|
||||
mkdir -p pr-release
|
||||
mv "${plgfile}" pr-release/dynamix.unraid.net.plg
|
||||
@@ -54,4 +79,4 @@ jobs:
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
AWS_REGION: "auto"
|
||||
SOURCE_DIR: pr-release
|
||||
DEST_DIR: unraid-api/pr/${{ github.event.pull_request.number }}
|
||||
DEST_DIR: unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}
|
||||
|
||||
6
.github/workflows/release-production.yml
vendored
6
.github/workflows/release-production.yml
vendored
@@ -30,9 +30,11 @@ jobs:
|
||||
prerelease: false
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '22.x'
|
||||
node-version: '22.17.1'
|
||||
- run: |
|
||||
echo '${{ steps.release-info.outputs.body }}' >> release-notes.txt
|
||||
cat << 'EOF' > release-notes.txt
|
||||
${{ steps.release-info.outputs.body }}
|
||||
EOF
|
||||
- run: npm install html-escaper@2 xml2js
|
||||
- name: Update Plugin Changelog
|
||||
uses: actions/github-script@v7
|
||||
|
||||
4
.github/workflows/test-libvirt.yml
vendored
4
.github/workflows/test-libvirt.yml
vendored
@@ -28,10 +28,10 @@ jobs:
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.13"
|
||||
python-version: "3.13.5"
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.4.3
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.1
|
||||
with:
|
||||
packages: libvirt-dev
|
||||
version: 1.0
|
||||
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -108,4 +108,7 @@ web/scripts/.sync-webgui-repo-*
|
||||
plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/data/activation-data.php
|
||||
|
||||
# Config file that changes between versions
|
||||
api/dev/Unraid.net/myservers.cfg
|
||||
api/dev/Unraid.net/myservers.cfg
|
||||
|
||||
# Claude local settings
|
||||
.claude/settings.local.json
|
||||
|
||||
1
.rclone-version
Normal file
1
.rclone-version
Normal file
@@ -0,0 +1 @@
|
||||
1.69.1
|
||||
@@ -1 +1 @@
|
||||
{".":"4.8.0"}
|
||||
{".":"4.11.0"}
|
||||
|
||||
15
.vscode/settings.json
vendored
15
.vscode/settings.json
vendored
@@ -1,15 +0,0 @@
|
||||
{
|
||||
"files.associations": {
|
||||
"*.page": "php"
|
||||
},
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.fixAll": "never",
|
||||
"source.fixAll.eslint": "explicit"
|
||||
},
|
||||
"i18n-ally.localesPaths": [
|
||||
"locales"
|
||||
],
|
||||
"i18n-ally.keystyle": "flat",
|
||||
"eslint.experimental.useFlatConfig": true
|
||||
}
|
||||
|
||||
22
.vscode/sftp-template.json
vendored
22
.vscode/sftp-template.json
vendored
@@ -1,22 +0,0 @@
|
||||
{
|
||||
"_comment": "rename this file to .vscode/sftp.json and replace name/host/privateKeyPath for your system",
|
||||
"name": "Tower",
|
||||
"host": "Tower.local",
|
||||
"protocol": "sftp",
|
||||
"port": 22,
|
||||
"username": "root",
|
||||
"privateKeyPath": "C:/Users/username/.ssh/tower",
|
||||
"remotePath": "/",
|
||||
"context": "plugin/source/dynamix.unraid.net/",
|
||||
"uploadOnSave": true,
|
||||
"useTempFile": false,
|
||||
"openSsh": false,
|
||||
"ignore": [
|
||||
"// comment: ignore dot files/dirs in root of repo",
|
||||
".github",
|
||||
".vscode",
|
||||
".git",
|
||||
".DS_Store"
|
||||
]
|
||||
}
|
||||
|
||||
81
@tailwind-shared/base-utilities.css
Normal file
81
@tailwind-shared/base-utilities.css
Normal file
@@ -0,0 +1,81 @@
|
||||
@custom-variant dark (&:where(.dark, .dark *));
|
||||
|
||||
@layer utilities {
|
||||
:host {
|
||||
--tw-divide-y-reverse: 0;
|
||||
--tw-border-style: solid;
|
||||
--tw-font-weight: initial;
|
||||
--tw-tracking: initial;
|
||||
--tw-translate-x: 0;
|
||||
--tw-translate-y: 0;
|
||||
--tw-translate-z: 0;
|
||||
--tw-rotate-x: rotateX(0);
|
||||
--tw-rotate-y: rotateY(0);
|
||||
--tw-rotate-z: rotateZ(0);
|
||||
--tw-skew-x: skewX(0);
|
||||
--tw-skew-y: skewY(0);
|
||||
--tw-space-x-reverse: 0;
|
||||
--tw-gradient-position: initial;
|
||||
--tw-gradient-from: #0000;
|
||||
--tw-gradient-via: #0000;
|
||||
--tw-gradient-to: #0000;
|
||||
--tw-gradient-stops: initial;
|
||||
--tw-gradient-via-stops: initial;
|
||||
--tw-gradient-from-position: 0%;
|
||||
--tw-gradient-via-position: 50%;
|
||||
--tw-gradient-to-position: 100%;
|
||||
--tw-shadow: 0 0 #0000;
|
||||
--tw-shadow-color: initial;
|
||||
--tw-inset-shadow: 0 0 #0000;
|
||||
--tw-inset-shadow-color: initial;
|
||||
--tw-ring-color: initial;
|
||||
--tw-ring-shadow: 0 0 #0000;
|
||||
--tw-inset-ring-color: initial;
|
||||
--tw-inset-ring-shadow: 0 0 #0000;
|
||||
--tw-ring-inset: initial;
|
||||
--tw-ring-offset-width: 0px;
|
||||
--tw-ring-offset-color: #fff;
|
||||
--tw-ring-offset-shadow: 0 0 #0000;
|
||||
--tw-blur: initial;
|
||||
--tw-brightness: initial;
|
||||
--tw-contrast: initial;
|
||||
--tw-grayscale: initial;
|
||||
--tw-hue-rotate: initial;
|
||||
--tw-invert: initial;
|
||||
--tw-opacity: initial;
|
||||
--tw-saturate: initial;
|
||||
--tw-sepia: initial;
|
||||
--tw-drop-shadow: initial;
|
||||
--tw-duration: initial;
|
||||
--tw-ease: initial;
|
||||
}
|
||||
}
|
||||
|
||||
@layer base {
|
||||
*,
|
||||
::after,
|
||||
::before,
|
||||
::backdrop,
|
||||
::file-selector-button {
|
||||
border-color: hsl(var(--border));
|
||||
}
|
||||
|
||||
|
||||
|
||||
body {
|
||||
--color-alpha: #1c1b1b;
|
||||
--color-beta: #f2f2f2;
|
||||
--color-gamma: #999999;
|
||||
--color-gamma-opaque: rgba(153, 153, 153, 0.5);
|
||||
--color-customgradient-start: rgba(242, 242, 242, 0);
|
||||
--color-customgradient-end: rgba(242, 242, 242, 0.85);
|
||||
--shadow-beta: 0 25px 50px -12px rgba(242, 242, 242, 0.15);
|
||||
--ring-offset-shadow: 0 0 var(--color-beta);
|
||||
--ring-shadow: 0 0 var(--color-beta);
|
||||
}
|
||||
|
||||
button:not(:disabled),
|
||||
[role='button']:not(:disabled) {
|
||||
cursor: pointer;
|
||||
}
|
||||
}
|
||||
130
@tailwind-shared/css-variables.css
Normal file
130
@tailwind-shared/css-variables.css
Normal file
@@ -0,0 +1,130 @@
|
||||
/* Hybrid theme system: Native CSS + Theme Store fallback */
|
||||
@layer base {
|
||||
/* Light mode defaults */
|
||||
:root {
|
||||
--background: 0 0% 100%;
|
||||
--foreground: 0 0% 3.9%;
|
||||
--muted: 0 0% 96.1%;
|
||||
--muted-foreground: 0 0% 45.1%;
|
||||
--popover: 0 0% 100%;
|
||||
--popover-foreground: 0 0% 3.9%;
|
||||
--card: 0 0% 100%;
|
||||
--card-foreground: 0 0% 3.9%;
|
||||
--border: 0 0% 89.8%;
|
||||
--input: 0 0% 89.8%;
|
||||
--primary: 0 0% 9%;
|
||||
--primary-foreground: 0 0% 98%;
|
||||
--secondary: 0 0% 96.1%;
|
||||
--secondary-foreground: 0 0% 9%;
|
||||
--accent: 0 0% 96.1%;
|
||||
--accent-foreground: 0 0% 9%;
|
||||
--destructive: 0 84.2% 60.2%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--ring: 0 0% 3.9%;
|
||||
--chart-1: 12 76% 61%;
|
||||
--chart-2: 173 58% 39%;
|
||||
--chart-3: 197 37% 24%;
|
||||
--chart-4: 43 74% 66%;
|
||||
--chart-5: 27 87% 67%;
|
||||
}
|
||||
|
||||
/* Dark mode */
|
||||
.dark {
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--muted: 0 0% 14.9%;
|
||||
--muted-foreground: 0 0% 63.9%;
|
||||
--popover: 0 0% 3.9%;
|
||||
--popover-foreground: 0 0% 98%;
|
||||
--card: 0 0% 3.9%;
|
||||
--card-foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
--input: 0 0% 14.9%;
|
||||
--primary: 0 0% 98%;
|
||||
--primary-foreground: 0 0% 9%;
|
||||
--secondary: 0 0% 14.9%;
|
||||
--secondary-foreground: 0 0% 98%;
|
||||
--accent: 0 0% 14.9%;
|
||||
--accent-foreground: 0 0% 98%;
|
||||
--destructive: 0 62.8% 30.6%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--ring: 0 0% 83.1%;
|
||||
--chart-1: 220 70% 50%;
|
||||
--chart-2: 160 60% 45%;
|
||||
--chart-3: 30 80% 55%;
|
||||
--chart-4: 280 65% 60%;
|
||||
--chart-5: 340 75% 55%;
|
||||
}
|
||||
|
||||
/* Alternative class-based dark mode support for specific Unraid themes */
|
||||
.dark[data-theme='black'],
|
||||
.dark[data-theme='gray'] {
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
}
|
||||
|
||||
/* For web components: inherit CSS variables from the host */
|
||||
:host {
|
||||
--background: inherit;
|
||||
--foreground: inherit;
|
||||
--muted: inherit;
|
||||
--muted-foreground: inherit;
|
||||
--popover: inherit;
|
||||
--popover-foreground: inherit;
|
||||
--card: inherit;
|
||||
--card-foreground: inherit;
|
||||
--border: inherit;
|
||||
--input: inherit;
|
||||
--primary: inherit;
|
||||
--primary-foreground: inherit;
|
||||
--secondary: inherit;
|
||||
--secondary-foreground: inherit;
|
||||
--accent: inherit;
|
||||
--accent-foreground: inherit;
|
||||
--destructive: inherit;
|
||||
--destructive-foreground: inherit;
|
||||
--ring: inherit;
|
||||
--chart-1: inherit;
|
||||
--chart-2: inherit;
|
||||
--chart-3: inherit;
|
||||
--chart-4: inherit;
|
||||
--chart-5: inherit;
|
||||
}
|
||||
|
||||
/* Class-based dark mode support for web components using :host-context */
|
||||
:host-context(.dark) {
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--muted: 0 0% 14.9%;
|
||||
--muted-foreground: 0 0% 63.9%;
|
||||
--popover: 0 0% 3.9%;
|
||||
--popover-foreground: 0 0% 98%;
|
||||
--card: 0 0% 3.9%;
|
||||
--card-foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
--input: 0 0% 14.9%;
|
||||
--primary: 0 0% 98%;
|
||||
--primary-foreground: 0 0% 9%;
|
||||
--secondary: 0 0% 14.9%;
|
||||
--secondary-foreground: 0 0% 98%;
|
||||
--accent: 0 0% 14.9%;
|
||||
--accent-foreground: 0 0% 98%;
|
||||
--destructive: 0 62.8% 30.6%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--ring: 0 0% 83.1%;
|
||||
--chart-1: 220 70% 50%;
|
||||
--chart-2: 160 60% 45%;
|
||||
--chart-3: 30 80% 55%;
|
||||
--chart-4: 280 65% 60%;
|
||||
--chart-5: 340 75% 55%;
|
||||
}
|
||||
|
||||
/* Alternative class-based dark mode support for specific Unraid themes */
|
||||
:host-context(.dark[data-theme='black']),
|
||||
:host-context(.dark[data-theme='gray']) {
|
||||
--background: 0 0% 3.9%;
|
||||
--foreground: 0 0% 98%;
|
||||
--border: 0 0% 14.9%;
|
||||
}
|
||||
}
|
||||
5
@tailwind-shared/index.css
Normal file
5
@tailwind-shared/index.css
Normal file
@@ -0,0 +1,5 @@
|
||||
/* Tailwind Shared Styles - Single entry point for all shared CSS */
|
||||
@import './css-variables.css';
|
||||
@import './unraid-theme.css';
|
||||
@import './base-utilities.css';
|
||||
@import './sonner.css';
|
||||
@@ -662,4 +662,4 @@
|
||||
.sonner-loader[data-visible='false'] {
|
||||
opacity: 0;
|
||||
transform: scale(0.8) translate(-50%, -50%);
|
||||
}
|
||||
}
|
||||
259
@tailwind-shared/unraid-theme.css
Normal file
259
@tailwind-shared/unraid-theme.css
Normal file
@@ -0,0 +1,259 @@
|
||||
@theme static {
|
||||
/* Breakpoints */
|
||||
--breakpoint-xs: 30rem;
|
||||
--breakpoint-2xl: 100rem;
|
||||
--breakpoint-3xl: 120rem;
|
||||
/* Container settings */
|
||||
--container-center: true;
|
||||
--container-padding: 2rem;
|
||||
--container-screen-2xl: 1400px;
|
||||
|
||||
/* Font families */
|
||||
--font-sans:
|
||||
clear-sans, ui-sans-serif, system-ui, sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji',
|
||||
'Segoe UI Symbol', 'Noto Color Emoji';
|
||||
|
||||
/* Grid template columns */
|
||||
--grid-template-columns-settings: 35% 1fr;
|
||||
|
||||
/* Border color default */
|
||||
--default-border-color: var(--color-border);
|
||||
--ui-border-muted: hsl(var(--border));
|
||||
--ui-radius: 0.5rem;
|
||||
--ui-primary: var(--color-primary-500);
|
||||
--ui-primary-hover: var(--color-primary-600);
|
||||
--ui-primary-active: var(--color-primary-700);
|
||||
|
||||
/* Color palette */
|
||||
--color-inherit: inherit;
|
||||
--color-transparent: transparent;
|
||||
--color-black: #1c1b1b;
|
||||
--color-grey-darkest: #222;
|
||||
--color-grey-darker: #606f7b;
|
||||
--color-grey-dark: #383735;
|
||||
--color-grey-mid: #999999;
|
||||
--color-grey: #e0e0e0;
|
||||
--color-grey-light: #dae1e7;
|
||||
--color-grey-lighter: #f1f5f8;
|
||||
--color-grey-lightest: #f2f2f2;
|
||||
--color-white: #ffffff;
|
||||
|
||||
/* Unraid colors */
|
||||
--color-yellow-accent: #e9bf41;
|
||||
--color-orange-dark: #f15a2c;
|
||||
--color-orange: #ff8c2f;
|
||||
|
||||
/* Unraid red palette */
|
||||
--color-unraid-red: #e22828;
|
||||
--color-unraid-red-50: #fef2f2;
|
||||
--color-unraid-red-100: #ffe1e1;
|
||||
--color-unraid-red-200: #ffc9c9;
|
||||
--color-unraid-red-300: #fea3a3;
|
||||
--color-unraid-red-400: #fc6d6d;
|
||||
--color-unraid-red-500: #f43f3f;
|
||||
--color-unraid-red-600: #e22828;
|
||||
--color-unraid-red-700: #bd1818;
|
||||
--color-unraid-red-800: #9c1818;
|
||||
--color-unraid-red-900: #821a1a;
|
||||
--color-unraid-red-950: #470808;
|
||||
|
||||
/* Unraid green palette */
|
||||
--color-unraid-green: #63a659;
|
||||
--color-unraid-green-50: #f5f9f4;
|
||||
--color-unraid-green-100: #e7f3e5;
|
||||
--color-unraid-green-200: #d0e6cc;
|
||||
--color-unraid-green-300: #aad1a4;
|
||||
--color-unraid-green-400: #7db474;
|
||||
--color-unraid-green-500: #63a659;
|
||||
--color-unraid-green-600: #457b3e;
|
||||
--color-unraid-green-700: #396134;
|
||||
--color-unraid-green-800: #314e2d;
|
||||
--color-unraid-green-900: #284126;
|
||||
--color-unraid-green-950: #122211;
|
||||
|
||||
/* Primary colors (orange) */
|
||||
--color-primary-50: #fff7ed;
|
||||
--color-primary-100: #ffedd5;
|
||||
--color-primary-200: #fed7aa;
|
||||
--color-primary-300: #fdba74;
|
||||
--color-primary-400: #fb923c;
|
||||
--color-primary-500: #ff6600;
|
||||
--color-primary-600: #ea580c;
|
||||
--color-primary-700: #c2410c;
|
||||
--color-primary-800: #9a3412;
|
||||
--color-primary-900: #7c2d12;
|
||||
--color-primary-950: #431407;
|
||||
|
||||
/* Header colors */
|
||||
--color-header-text-primary: var(--header-text-primary);
|
||||
--color-header-text-secondary: var(--header-text-secondary);
|
||||
--color-header-background-color: var(--header-background-color);
|
||||
|
||||
/* Legacy colors */
|
||||
--color-alpha: var(--color-alpha);
|
||||
--color-beta: var(--color-beta);
|
||||
--color-gamma: var(--color-gamma);
|
||||
--color-gamma-opaque: var(--color-gamma-opaque);
|
||||
--color-customgradient-start: var(--color-customgradient-start);
|
||||
--color-customgradient-end: var(--color-customgradient-end);
|
||||
|
||||
/* Gradients */
|
||||
--color-header-gradient-start: var(--header-gradient-start);
|
||||
--color-header-gradient-end: var(--header-gradient-end);
|
||||
--color-banner-gradient: var(--banner-gradient);
|
||||
|
||||
/* Font sizes */
|
||||
--font-10px: 10px;
|
||||
--font-12px: 12px;
|
||||
--font-14px: 14px;
|
||||
--font-16px: 16px;
|
||||
--font-18px: 18px;
|
||||
--font-20px: 20px;
|
||||
--font-24px: 24px;
|
||||
--font-30px: 30px;
|
||||
|
||||
/* Spacing */
|
||||
--spacing-4_5: 1.125rem;
|
||||
--spacing--8px: -8px;
|
||||
--spacing-2px: 2px;
|
||||
--spacing-4px: 4px;
|
||||
--spacing-6px: 6px;
|
||||
--spacing-8px: 8px;
|
||||
--spacing-10px: 10px;
|
||||
--spacing-12px: 12px;
|
||||
--spacing-14px: 14px;
|
||||
--spacing-16px: 16px;
|
||||
--spacing-20px: 20px;
|
||||
--spacing-24px: 24px;
|
||||
--spacing-28px: 28px;
|
||||
--spacing-32px: 32px;
|
||||
--spacing-36px: 36px;
|
||||
--spacing-40px: 40px;
|
||||
--spacing-64px: 64px;
|
||||
--spacing-80px: 80px;
|
||||
--spacing-90px: 90px;
|
||||
--spacing-150px: 150px;
|
||||
--spacing-160px: 160px;
|
||||
--spacing-200px: 200px;
|
||||
--spacing-260px: 260px;
|
||||
--spacing-300px: 300px;
|
||||
--spacing-310px: 310px;
|
||||
--spacing-350px: 350px;
|
||||
--spacing-448px: 448px;
|
||||
--spacing-512px: 512px;
|
||||
--spacing-640px: 640px;
|
||||
--spacing-800px: 800px;
|
||||
|
||||
/* Width and Height values */
|
||||
--width-36px: 36px;
|
||||
--height-36px: 36px;
|
||||
|
||||
/* Min/Max widths */
|
||||
--min-width-86px: 86px;
|
||||
--min-width-160px: 160px;
|
||||
--min-width-260px: 260px;
|
||||
--min-width-300px: 300px;
|
||||
--min-width-310px: 310px;
|
||||
--min-width-350px: 350px;
|
||||
--min-width-800px: 800px;
|
||||
|
||||
--max-width-86px: 86px;
|
||||
--max-width-160px: 160px;
|
||||
--max-width-260px: 260px;
|
||||
--max-width-300px: 300px;
|
||||
--max-width-310px: 310px;
|
||||
--max-width-350px: 350px;
|
||||
--max-width-640px: 640px;
|
||||
--max-width-800px: 800px;
|
||||
--max-width-1024px: 1024px;
|
||||
|
||||
/* Animations */
|
||||
--animate-mark-2: mark-2 1.5s ease infinite;
|
||||
--animate-mark-3: mark-3 1.5s ease infinite;
|
||||
--animate-mark-6: mark-6 1.5s ease infinite;
|
||||
--animate-mark-7: mark-7 1.5s ease infinite;
|
||||
|
||||
/* Radius */
|
||||
--radius: 0.5rem;
|
||||
|
||||
/* Text Resizing */
|
||||
--text-xs: 1.2rem; /* 12px at 10px base */
|
||||
--text-sm: 1.4rem; /* 14px at 10px base */
|
||||
--text-base: 1.6rem; /* 16px at 10px base */
|
||||
--text-lg: 1.8rem; /* 18px at 10px base */
|
||||
--text-xl: 2rem; /* 20px at 10px base */
|
||||
--text-2xl: 2.4rem; /* 24px at 10px base */
|
||||
--text-3xl: 3rem; /* 30px at 10px base */
|
||||
--text-4xl: 3.6rem; /* 36px at 10px base */
|
||||
--text-5xl: 4.8rem; /* 48px at 10px base */
|
||||
--text-6xl: 6rem; /* 60px at 10px base */
|
||||
--text-7xl: 7.2rem; /* 72px at 10px base */
|
||||
--text-8xl: 9.6rem; /* 96px at 10px base */
|
||||
--text-9xl: 12.8rem; /* 128px at 10px base */
|
||||
--spacing: 0.4rem; /* 4px at 10px base */
|
||||
}
|
||||
|
||||
/* Keyframes */
|
||||
@keyframes mark-2 {
|
||||
50% {
|
||||
transform: translateY(-40px);
|
||||
}
|
||||
to {
|
||||
transform: translateY(0);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes mark-3 {
|
||||
50% {
|
||||
transform: translateY(-62px);
|
||||
}
|
||||
to {
|
||||
transform: translateY(0);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes mark-6 {
|
||||
50% {
|
||||
transform: translateY(40px);
|
||||
}
|
||||
to {
|
||||
transform: translateY(0);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes mark-7 {
|
||||
50% {
|
||||
transform: translateY(62px);
|
||||
}
|
||||
to {
|
||||
transform: translateY(0);
|
||||
}
|
||||
}
|
||||
|
||||
/* Theme colors that reference CSS variables */
|
||||
@theme inline {
|
||||
--color-background: hsl(var(--background));
|
||||
--color-foreground: hsl(var(--foreground));
|
||||
--color-muted: hsl(var(--muted));
|
||||
--color-muted-foreground: hsl(var(--muted-foreground));
|
||||
--color-popover: hsl(var(--popover));
|
||||
--color-popover-foreground: hsl(var(--popover-foreground));
|
||||
--color-card: hsl(var(--card));
|
||||
--color-card-foreground: hsl(var(--card-foreground));
|
||||
--color-border: hsl(var(--border));
|
||||
--color-input: hsl(var(--input));
|
||||
--color-primary: hsl(var(--primary));
|
||||
--color-primary-foreground: hsl(var(--primary-foreground));
|
||||
--color-secondary: hsl(var(--secondary));
|
||||
--color-secondary-foreground: hsl(var(--secondary-foreground));
|
||||
--color-accent: hsl(var(--accent));
|
||||
--color-accent-foreground: hsl(var(--accent-foreground));
|
||||
--color-destructive: hsl(var(--destructive));
|
||||
--color-destructive-foreground: hsl(var(--destructive-foreground));
|
||||
--color-ring: hsl(var(--ring));
|
||||
--color-chart-1: hsl(var(--chart-1, 12 76% 61%));
|
||||
--color-chart-2: hsl(var(--chart-2, 173 58% 39%));
|
||||
--color-chart-3: hsl(var(--chart-3, 197 37% 24%));
|
||||
--color-chart-4: hsl(var(--chart-4, 43 74% 66%));
|
||||
--color-chart-5: hsl(var(--chart-5, 27 87% 67%));
|
||||
}
|
||||
152
CLAUDE.md
Normal file
152
CLAUDE.md
Normal file
@@ -0,0 +1,152 @@
|
||||
# CLAUDE.md
|
||||
|
||||
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||
|
||||
## Project Overview
|
||||
|
||||
This is the Unraid API monorepo containing multiple packages that provide API functionality for Unraid servers. It uses pnpm workspaces with the following structure:
|
||||
|
||||
- `/api` - Core NestJS API server with GraphQL
|
||||
- `/web` - Nuxt.js frontend application
|
||||
- `/unraid-ui` - Vue 3 component library
|
||||
- `/plugin` - Unraid plugin package (.plg)
|
||||
- `/packages` - Shared packages and API plugins
|
||||
|
||||
## Essential Commands
|
||||
|
||||
### Development
|
||||
|
||||
```bash
|
||||
pnpm install # Install all dependencies
|
||||
pnpm dev # Run all dev servers concurrently
|
||||
pnpm build # Build all packages
|
||||
pnpm build:watch # Watch mode with local plugin build
|
||||
```
|
||||
|
||||
### Testing & Code Quality
|
||||
|
||||
```bash
|
||||
pnpm test # Run all tests
|
||||
pnpm lint # Run linting
|
||||
pnpm lint:fix # Fix linting issues
|
||||
pnpm type-check # TypeScript type checking
|
||||
```
|
||||
|
||||
### API Development
|
||||
|
||||
```bash
|
||||
cd api && pnpm dev # Run API server (http://localhost:3001)
|
||||
cd api && pnpm test:watch # Run tests in watch mode
|
||||
cd api && pnpm codegen # Generate GraphQL types
|
||||
```
|
||||
|
||||
### Deployment
|
||||
|
||||
```bash
|
||||
pnpm unraid:deploy <SERVER_IP> # Deploy all to Unraid server
|
||||
```
|
||||
|
||||
### Developer Tools
|
||||
|
||||
```bash
|
||||
unraid-api developer # Interactive prompt for tools
|
||||
unraid-api developer --sandbox true # Enable GraphQL sandbox
|
||||
unraid-api developer --sandbox false # Disable GraphQL sandbox
|
||||
unraid-api developer --enable-modal # Enable modal testing tool
|
||||
unraid-api developer --disable-modal # Disable modal testing tool
|
||||
```
|
||||
|
||||
## Architecture Notes
|
||||
|
||||
### API Structure (NestJS)
|
||||
|
||||
- Modules: `auth`, `config`, `plugins`, `emhttp`, `monitoring`
|
||||
- GraphQL API with Apollo Server at `/graphql`
|
||||
- Redux store for state management in `src/store/`
|
||||
- Plugin system for extending functionality
|
||||
- Entry points: `src/index.ts` (server), `src/cli.ts` (CLI)
|
||||
|
||||
### Key Patterns
|
||||
|
||||
- TypeScript imports use `.js` extensions (ESM compatibility)
|
||||
- NestJS dependency injection with decorators
|
||||
- GraphQL schema-first approach with code generation
|
||||
- API plugins follow specific structure (see `api/docs/developer/api-plugins.md`)
|
||||
|
||||
### Authentication
|
||||
|
||||
- API key authentication via headers
|
||||
- Cookie-based session management
|
||||
- Keys stored in `/boot/config/plugins/unraid-api/`
|
||||
|
||||
### Development Workflow
|
||||
|
||||
1. Work Intent required before starting development
|
||||
2. Fork from `main` branch
|
||||
3. Reference Work Intent in PR
|
||||
4. No direct pushes to main
|
||||
|
||||
### Debug Mode
|
||||
|
||||
```bash
|
||||
LOG_LEVEL=debug unraid-api start --debug
|
||||
```
|
||||
|
||||
Enables GraphQL playground at `http://tower.local/graphql`
|
||||
|
||||
## Coding Guidelines
|
||||
|
||||
### General Rules
|
||||
|
||||
- Never add comments unless they are needed for clarity of function
|
||||
- Never add comments for obvious things, and avoid commenting when starting and ending code blocks
|
||||
- Be CONCISE, keep replies shorter than a paragraph if at all possible
|
||||
|
||||
### API Development Rules (`api/**/*`)
|
||||
|
||||
- Use pnpm ONLY for package management
|
||||
- Always run scripts from api/package.json unless requested
|
||||
- Prefer adding new files to the NestJS repo located at `api/src/unraid-api/` instead of the legacy code
|
||||
- Test suite is VITEST, do not use jest
|
||||
- Run tests with: `pnpm --filter ./api test`
|
||||
- Prefer to not mock simple dependencies
|
||||
|
||||
### Web Development Rules (`web/**/*`)
|
||||
|
||||
- Always run `pnpm codegen` for GraphQL code generation in the web directory
|
||||
- GraphQL queries must be placed in `.query.ts` files
|
||||
- GraphQL mutations must be placed in `.mutation.ts` files
|
||||
- All GraphQL under `web/` must follow this naming convention
|
||||
|
||||
### Testing Guidelines
|
||||
|
||||
#### Vue Component Testing
|
||||
|
||||
- This is a Nuxt.js app but we are testing with vitest outside of the Nuxt environment
|
||||
- Nuxt is currently set to auto import so some vue files may need compute or ref imported
|
||||
- Use pnpm when running terminal commands and stay within the web directory
|
||||
- Tests are located under `web/__test__`, run with `pnpm test`
|
||||
- Use `mount` from Vue Test Utils for component testing
|
||||
- Stub complex child components that aren't the focus of the test
|
||||
- Mock external dependencies and services
|
||||
- Test component behavior and output, not implementation details
|
||||
- Use `createTestingPinia()` for mocking stores in components
|
||||
- Find elements with semantic queries like `find('button')` rather than data-test IDs
|
||||
- Use `await nextTick()` for DOM updates
|
||||
- Always await async operations before making assertions
|
||||
|
||||
#### Store Testing with Pinia
|
||||
|
||||
- Use `createPinia()` and `setActivePinia` when testing Store files
|
||||
- Only use `createTestingPinia` if you specifically need its testing features
|
||||
- Let stores initialize with their natural default state
|
||||
- Don't mock the store being tested
|
||||
- Ensure Vue reactivity imports are added to store files (computed, ref, watchEffect)
|
||||
- Place all mock declarations at the top level
|
||||
- Use factory functions for module mocks to avoid hoisting issues
|
||||
- Clear mocks between tests to ensure isolation
|
||||
|
||||
## Development Memories
|
||||
|
||||
- We are using tailwind v4 we do not need a tailwind config anymore
|
||||
- always search the internet for tailwind v4 documentation when making tailwind related style changes
|
||||
@@ -13,6 +13,10 @@ PATHS_PARITY_CHECKS=./dev/states/parity-checks.log
|
||||
PATHS_CONFIG_MODULES=./dev/configs
|
||||
PATHS_ACTIVATION_BASE=./dev/activation
|
||||
PATHS_PASSWD=./dev/passwd
|
||||
PATHS_RCLONE_SOCKET=./dev/rclone-socket
|
||||
PATHS_LOG_BASE=./dev/log # Where we store logs
|
||||
PATHS_LOGS_FILE=./dev/log/graphql-api.log
|
||||
PATHS_CONNECT_STATUS_FILE_PATH=./dev/connectStatus.json # Connect plugin status file
|
||||
ENVIRONMENT="development"
|
||||
NODE_ENV="development"
|
||||
PORT="3001"
|
||||
|
||||
@@ -13,5 +13,6 @@ PATHS_PARITY_CHECKS=./dev/states/parity-checks.log
|
||||
PATHS_CONFIG_MODULES=./dev/configs
|
||||
PATHS_ACTIVATION_BASE=./dev/activation
|
||||
PATHS_PASSWD=./dev/passwd
|
||||
PATHS_LOGS_FILE=./dev/log/graphql-api.log
|
||||
PORT=5000
|
||||
NODE_ENV="test"
|
||||
|
||||
105
api/.eslintrc.ts
105
api/.eslintrc.ts
@@ -4,54 +4,59 @@ import noRelativeImportPaths from 'eslint-plugin-no-relative-import-paths';
|
||||
import prettier from 'eslint-plugin-prettier';
|
||||
import tseslint from 'typescript-eslint';
|
||||
|
||||
export default tseslint.config(eslint.configs.recommended, ...tseslint.configs.recommended, {
|
||||
plugins: {
|
||||
'no-relative-import-paths': noRelativeImportPaths,
|
||||
prettier: prettier,
|
||||
import: importPlugin,
|
||||
export default tseslint.config(
|
||||
eslint.configs.recommended,
|
||||
...tseslint.configs.recommended,
|
||||
{
|
||||
ignores: ['src/graphql/generated/client/**/*', 'src/**/**/dummy-process.js'],
|
||||
},
|
||||
rules: {
|
||||
'@typescript-eslint/no-redundant-type-constituents': 'off',
|
||||
'@typescript-eslint/no-unsafe-call': 'off',
|
||||
'@typescript-eslint/naming-convention': 'off',
|
||||
'@typescript-eslint/no-unsafe-assignment': 'off',
|
||||
'@typescript-eslint/no-unsafe-return': 'off',
|
||||
'@typescript-eslint/ban-types': 'off',
|
||||
'@typescript-eslint/no-explicit-any': 'off',
|
||||
'@typescript-eslint/no-empty-object-type': 'off',
|
||||
'no-use-before-define': ['off'],
|
||||
'no-multiple-empty-lines': ['error', { max: 1, maxBOF: 0, maxEOF: 1 }],
|
||||
'@typescript-eslint/no-unused-vars': 'off',
|
||||
'@typescript-eslint/no-unused-expressions': 'off',
|
||||
'import/no-unresolved': 'off',
|
||||
'import/no-absolute-path': 'off',
|
||||
'import/prefer-default-export': 'off',
|
||||
'no-relative-import-paths/no-relative-import-paths': [
|
||||
'error',
|
||||
{ allowSameFolder: false, rootDir: 'src', prefix: '@app' },
|
||||
],
|
||||
'prettier/prettier': 'error',
|
||||
'import/extensions': [
|
||||
'error',
|
||||
'ignorePackages',
|
||||
{
|
||||
js: 'always',
|
||||
ts: 'always',
|
||||
},
|
||||
],
|
||||
'no-restricted-globals': [
|
||||
'error',
|
||||
{
|
||||
name: '__dirname',
|
||||
message: 'Use import.meta.url instead of __dirname in ESM',
|
||||
},
|
||||
{
|
||||
name: '__filename',
|
||||
message: 'Use import.meta.url instead of __filename in ESM',
|
||||
},
|
||||
],
|
||||
'eol-last': ['error', 'always'],
|
||||
},
|
||||
|
||||
ignores: ['src/graphql/generated/client/**/*'],
|
||||
});
|
||||
{
|
||||
plugins: {
|
||||
'no-relative-import-paths': noRelativeImportPaths,
|
||||
prettier: prettier,
|
||||
import: importPlugin,
|
||||
},
|
||||
rules: {
|
||||
'@typescript-eslint/no-redundant-type-constituents': 'off',
|
||||
'@typescript-eslint/no-unsafe-call': 'off',
|
||||
'@typescript-eslint/naming-convention': 'off',
|
||||
'@typescript-eslint/no-unsafe-assignment': 'off',
|
||||
'@typescript-eslint/no-unsafe-return': 'off',
|
||||
'@typescript-eslint/ban-types': 'off',
|
||||
'@typescript-eslint/no-explicit-any': 'off',
|
||||
'@typescript-eslint/no-empty-object-type': 'off',
|
||||
'no-use-before-define': ['off'],
|
||||
'no-multiple-empty-lines': ['error', { max: 1, maxBOF: 0, maxEOF: 1 }],
|
||||
'@typescript-eslint/no-unused-vars': 'off',
|
||||
'@typescript-eslint/no-unused-expressions': 'off',
|
||||
'import/no-unresolved': 'off',
|
||||
'import/no-absolute-path': 'off',
|
||||
'import/prefer-default-export': 'off',
|
||||
'no-relative-import-paths/no-relative-import-paths': [
|
||||
'error',
|
||||
{ allowSameFolder: false, rootDir: 'src', prefix: '@app' },
|
||||
],
|
||||
'prettier/prettier': 'error',
|
||||
'import/extensions': [
|
||||
'error',
|
||||
'ignorePackages',
|
||||
{
|
||||
js: 'always',
|
||||
ts: 'always',
|
||||
},
|
||||
],
|
||||
'no-restricted-globals': [
|
||||
'error',
|
||||
{
|
||||
name: '__dirname',
|
||||
message: 'Use import.meta.url instead of __dirname in ESM',
|
||||
},
|
||||
{
|
||||
name: '__filename',
|
||||
message: 'Use import.meta.url instead of __filename in ESM',
|
||||
},
|
||||
],
|
||||
'eol-last': ['error', 'always'],
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
7
api/.vscode/settings.json
vendored
7
api/.vscode/settings.json
vendored
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"eslint.lintTask.options": "--flag unstable_ts_config",
|
||||
"eslint.options": {
|
||||
"flags": ["unstable_ts_config"],
|
||||
"overrideConfigFile": ".eslintrc.ts"
|
||||
}
|
||||
}
|
||||
169
api/CHANGELOG.md
169
api/CHANGELOG.md
@@ -1,5 +1,174 @@
|
||||
# Changelog
|
||||
|
||||
## [4.11.0](https://github.com/unraid/api/compare/v4.10.0...v4.11.0) (2025-07-28)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* tailwind v4 ([#1522](https://github.com/unraid/api/issues/1522)) ([2c62e0a](https://github.com/unraid/api/commit/2c62e0ad09c56d2293b76d07833dfb142c898937))
|
||||
* **web:** install and configure nuxt ui ([#1524](https://github.com/unraid/api/issues/1524)) ([407585c](https://github.com/unraid/api/commit/407585cd40c409175d8e7b861f8d61d8cabc11c9))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add missing breakpoints ([#1535](https://github.com/unraid/api/issues/1535)) ([f5352e3](https://github.com/unraid/api/commit/f5352e3a26a2766e85d19ffb5f74960c536b91b3))
|
||||
* border color incorrect in tailwind ([#1544](https://github.com/unraid/api/issues/1544)) ([f14b74a](https://github.com/unraid/api/commit/f14b74af91783b08640c0949c51ba7f18508f06f))
|
||||
* **connect:** omit extraneous fields during connect config validation ([#1538](https://github.com/unraid/api/issues/1538)) ([45bd736](https://github.com/unraid/api/commit/45bd73698b2bd534a8aff2c6ac73403de6c58561))
|
||||
* **deps:** pin dependencies ([#1528](https://github.com/unraid/api/issues/1528)) ([a74d935](https://github.com/unraid/api/commit/a74d935b566dd7af1a21824c9b7ab562232f9d8b))
|
||||
* **deps:** pin dependency @nuxt/ui to 3.2.0 ([#1532](https://github.com/unraid/api/issues/1532)) ([8279531](https://github.com/unraid/api/commit/8279531f2b86a78e81a77e6c037a0fb752e98062))
|
||||
* **deps:** update all non-major dependencies ([#1510](https://github.com/unraid/api/issues/1510)) ([1a8da6d](https://github.com/unraid/api/commit/1a8da6d92b96d3afa2a8b42446b36f1ee98b64a0))
|
||||
* **deps:** update all non-major dependencies ([#1520](https://github.com/unraid/api/issues/1520)) ([e2fa648](https://github.com/unraid/api/commit/e2fa648d1cf5a6cbe3e55c3f52c203d26bb4d526))
|
||||
* inject Tailwind CSS into client entry point ([#1537](https://github.com/unraid/api/issues/1537)) ([86b6c4f](https://github.com/unraid/api/commit/86b6c4f85b7b30bb4a13d57450a76bf4c28a3fff))
|
||||
* make settings grid responsive ([#1463](https://github.com/unraid/api/issues/1463)) ([9dfdb8d](https://github.com/unraid/api/commit/9dfdb8dce781fa662d6434ee432e4521f905ffa5))
|
||||
* **notifications:** gracefully handle & mask invalid notifications ([#1529](https://github.com/unraid/api/issues/1529)) ([05056e7](https://github.com/unraid/api/commit/05056e7ca1702eb7bf6c507950460b6b15bf7916))
|
||||
* truncate log files when they take up more than 5mb of space ([#1530](https://github.com/unraid/api/issues/1530)) ([0a18b38](https://github.com/unraid/api/commit/0a18b38008dd86a125cde7f684636d5dbb36f082))
|
||||
* use async for primary file read/writes ([#1531](https://github.com/unraid/api/issues/1531)) ([23b2b88](https://github.com/unraid/api/commit/23b2b8846158a27d1c9808bce0cc1506779c4dc3))
|
||||
|
||||
## [4.10.0](https://github.com/unraid/api/compare/v4.9.5...v4.10.0) (2025-07-15)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* trial extension allowed within 5 days of expiration ([#1490](https://github.com/unraid/api/issues/1490)) ([f34a33b](https://github.com/unraid/api/commit/f34a33bc9f1a7e135d453d9d31888789bfc3f878))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* delay `nginx:reload` file mod effect by 10 seconds ([#1512](https://github.com/unraid/api/issues/1512)) ([af33e99](https://github.com/unraid/api/commit/af33e999a0480a77e3e6b2aa833b17b38b835656))
|
||||
* **deps:** update all non-major dependencies ([#1489](https://github.com/unraid/api/issues/1489)) ([53b05eb](https://github.com/unraid/api/commit/53b05ebe5e2050cb0916fcd65e8d41370aee0624))
|
||||
* ensure no crash if emhttp state configs are missing ([#1514](https://github.com/unraid/api/issues/1514)) ([1a7d35d](https://github.com/unraid/api/commit/1a7d35d3f6972fd8aff58c17b2b0fb79725e660e))
|
||||
* **my.servers:** improve DNS resolution robustness for backup server ([#1518](https://github.com/unraid/api/issues/1518)) ([eecd9b1](https://github.com/unraid/api/commit/eecd9b1017a63651d1dc782feaa224111cdee8b6))
|
||||
* over-eager cloud query from web components ([#1506](https://github.com/unraid/api/issues/1506)) ([074370c](https://github.com/unraid/api/commit/074370c42cdecc4dbc58193ff518aa25735c56b3))
|
||||
* replace myservers.cfg reads in UpdateFlashBackup.php ([#1517](https://github.com/unraid/api/issues/1517)) ([441e180](https://github.com/unraid/api/commit/441e1805c108a6c1cd35ee093246b975a03f8474))
|
||||
* rm short-circuit in `rc.unraid-api` if plugin config dir is absent ([#1515](https://github.com/unraid/api/issues/1515)) ([29dcb7d](https://github.com/unraid/api/commit/29dcb7d0f088937cefc5158055f48680e86e5c36))
|
||||
|
||||
## [4.9.5](https://github.com/unraid/api/compare/v4.9.4...v4.9.5) (2025-07-10)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **connect:** rm eager restart on `ERROR_RETYING` connection status ([#1502](https://github.com/unraid/api/issues/1502)) ([dd759d9](https://github.com/unraid/api/commit/dd759d9f0f841b296f8083bc67c6cd3f7a69aa5b))
|
||||
|
||||
## [4.9.4](https://github.com/unraid/api/compare/v4.9.3...v4.9.4) (2025-07-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* backport `<unraid-modals>` upon plg install when necessary ([#1499](https://github.com/unraid/api/issues/1499)) ([33e0b1a](https://github.com/unraid/api/commit/33e0b1ab24bedb6a2c7b376ea73dbe65bc3044be))
|
||||
* DefaultPageLayout patch rollback omits legacy header logo ([#1497](https://github.com/unraid/api/issues/1497)) ([ea20d1e](https://github.com/unraid/api/commit/ea20d1e2116fcafa154090fee78b42ec5d9ba584))
|
||||
* event emitter setup for writing status ([#1496](https://github.com/unraid/api/issues/1496)) ([ca4e2db](https://github.com/unraid/api/commit/ca4e2db1f29126a1fa3784af563832edda64b0ca))
|
||||
|
||||
## [4.9.3](https://github.com/unraid/api/compare/v4.9.2...v4.9.3) (2025-07-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* duplicated header logo after api stops ([#1493](https://github.com/unraid/api/issues/1493)) ([4168f43](https://github.com/unraid/api/commit/4168f43e3ecd51479bec3aae585abbe6dcd3e416))
|
||||
|
||||
## [4.9.2](https://github.com/unraid/api/compare/v4.9.1...v4.9.2) (2025-07-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* invalid configs no longer crash API ([#1491](https://github.com/unraid/api/issues/1491)) ([6bf3f77](https://github.com/unraid/api/commit/6bf3f776380edeff5133517e6aca223556e30144))
|
||||
* invalid state for unraid plugin ([#1492](https://github.com/unraid/api/issues/1492)) ([39b8f45](https://github.com/unraid/api/commit/39b8f453da23793ef51f8e7f7196370aada8c5aa))
|
||||
* release note escaping ([5b6bcb6](https://github.com/unraid/api/commit/5b6bcb6043a5269bff4dc28714d787a5a3f07e22))
|
||||
|
||||
## [4.9.1](https://github.com/unraid/api/compare/v4.9.0...v4.9.1) (2025-07-08)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **HeaderOsVersion:** adjust top margin for header component ([#1485](https://github.com/unraid/api/issues/1485)) ([862b54d](https://github.com/unraid/api/commit/862b54de8cd793606f1d29e76c19d4a0e1ae172f))
|
||||
* sign out doesn't work ([#1486](https://github.com/unraid/api/issues/1486)) ([f3671c3](https://github.com/unraid/api/commit/f3671c3e0750b79be1f19655a07a0e9932289b3f))
|
||||
|
||||
## [4.9.0](https://github.com/unraid/api/compare/v4.8.0...v4.9.0) (2025-07-08)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add graphql resource for API plugins ([#1420](https://github.com/unraid/api/issues/1420)) ([642a220](https://github.com/unraid/api/commit/642a220c3a796829505d8449dc774968c9d5c222))
|
||||
* add management page for API keys ([#1408](https://github.com/unraid/api/issues/1408)) ([0788756](https://github.com/unraid/api/commit/0788756b918a8e99be51f34bf6f96bbe5b67395a))
|
||||
* add rclone ([#1362](https://github.com/unraid/api/issues/1362)) ([5517e75](https://github.com/unraid/api/commit/5517e7506b05c7bef5012bb9f8d2103e91061997))
|
||||
* API key management ([#1407](https://github.com/unraid/api/issues/1407)) ([d37dc3b](https://github.com/unraid/api/commit/d37dc3bce28bad1c893ae7eff96ca5ffd9177648))
|
||||
* api plugin management via CLI ([#1416](https://github.com/unraid/api/issues/1416)) ([3dcbfbe](https://github.com/unraid/api/commit/3dcbfbe48973b8047f0c6c560068808d86ac6970))
|
||||
* build out docker components ([#1427](https://github.com/unraid/api/issues/1427)) ([711cc9a](https://github.com/unraid/api/commit/711cc9ac926958bcf2996455b023ad265b041530))
|
||||
* docker and info resolver issues ([#1423](https://github.com/unraid/api/issues/1423)) ([9901039](https://github.com/unraid/api/commit/9901039a3863de06b520e23cb2573b610716c673))
|
||||
* fix shading in UPC to be less severe ([#1438](https://github.com/unraid/api/issues/1438)) ([b7c2407](https://github.com/unraid/api/commit/b7c240784052276fc60e064bd7d64dd6e801ae90))
|
||||
* info resolver cleanup ([#1425](https://github.com/unraid/api/issues/1425)) ([1b279bb](https://github.com/unraid/api/commit/1b279bbab3a51e7d032e7e3c9898feac8bfdbafa))
|
||||
* initial codeql setup ([#1390](https://github.com/unraid/api/issues/1390)) ([2ade7eb](https://github.com/unraid/api/commit/2ade7eb52792ef481aaf711dc07029629ea107d9))
|
||||
* initialize claude code in codebse ([#1418](https://github.com/unraid/api/issues/1418)) ([b6c4ee6](https://github.com/unraid/api/commit/b6c4ee6eb4b9ebb6d6e59a341e1f51b253578752))
|
||||
* move api key fetching to use api key service ([#1439](https://github.com/unraid/api/issues/1439)) ([86bea56](https://github.com/unraid/api/commit/86bea5627270a2a18c5b7db36dd59061ab61e753))
|
||||
* move to cron v4 ([#1428](https://github.com/unraid/api/issues/1428)) ([b8035c2](https://github.com/unraid/api/commit/b8035c207a6e387c7af3346593a872664f6c867b))
|
||||
* move to iframe for changelog ([#1388](https://github.com/unraid/api/issues/1388)) ([fcd6fbc](https://github.com/unraid/api/commit/fcd6fbcdd48e7f224b3bd8799a668d9e01967f0c))
|
||||
* native slackware package ([#1381](https://github.com/unraid/api/issues/1381)) ([4f63b4c](https://github.com/unraid/api/commit/4f63b4cf3bb9391785f07a38defe54ec39071caa))
|
||||
* send active unraid theme to docs ([#1400](https://github.com/unraid/api/issues/1400)) ([f71943b](https://github.com/unraid/api/commit/f71943b62b30119e17766e56534962630f52a591))
|
||||
* slightly better watch mode ([#1398](https://github.com/unraid/api/issues/1398)) ([881f1e0](https://github.com/unraid/api/commit/881f1e09607d1e4a8606f8d048636ba09d8fcac1))
|
||||
* upgrade nuxt-custom-elements ([#1461](https://github.com/unraid/api/issues/1461)) ([345e83b](https://github.com/unraid/api/commit/345e83bfb0904381d784fc77b3dcd3ad7e53d898))
|
||||
* use bigint instead of long ([#1403](https://github.com/unraid/api/issues/1403)) ([574d572](https://github.com/unraid/api/commit/574d572d6567c652057b29776694e86267316ca7))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* activation indicator removed ([5edfd82](https://github.com/unraid/api/commit/5edfd823b862cfc1f864565021f12334fe9317c6))
|
||||
* alignment of settings on ManagementAccess settings page ([#1421](https://github.com/unraid/api/issues/1421)) ([70c790f](https://github.com/unraid/api/commit/70c790ff89075a785d7f0623bbf3c34a3806bbdc))
|
||||
* allow rclone to fail to initialize ([#1453](https://github.com/unraid/api/issues/1453)) ([7c6f02a](https://github.com/unraid/api/commit/7c6f02a5cb474fb285db294ec6f80d1c2c57e142))
|
||||
* always download 7.1 versioned files for patching ([edc0d15](https://github.com/unraid/api/commit/edc0d1578b89c3b3e56e637de07137e069656fa8))
|
||||
* api `pnpm type-check` ([#1442](https://github.com/unraid/api/issues/1442)) ([3122bdb](https://github.com/unraid/api/commit/3122bdb953eec58469fd9cf6f468e75621781040))
|
||||
* **api:** connect config `email` validation ([#1454](https://github.com/unraid/api/issues/1454)) ([b9a1b9b](https://github.com/unraid/api/commit/b9a1b9b08746b6d4cb2128d029a3dab7cdd47677))
|
||||
* backport unraid/webgui[#2269](https://github.com/unraid/api/issues/2269) rc.nginx update ([#1436](https://github.com/unraid/api/issues/1436)) ([a7ef06e](https://github.com/unraid/api/commit/a7ef06ea252545cef084e21cea741a8ec866e7cc))
|
||||
* bigint ([e54d27a](https://github.com/unraid/api/commit/e54d27aede1b1e784971468777c5e65cde66f2ac))
|
||||
* config migration from `myservers.cfg` ([#1440](https://github.com/unraid/api/issues/1440)) ([c4c9984](https://github.com/unraid/api/commit/c4c99843c7104414120bffc5dd5ed78ab6c8ba02))
|
||||
* **connect:** fatal race-condition in websocket disposal ([#1462](https://github.com/unraid/api/issues/1462)) ([0ec0de9](https://github.com/unraid/api/commit/0ec0de982f017b61a145c7a4176718b484834f41))
|
||||
* **connect:** mothership connection ([#1464](https://github.com/unraid/api/issues/1464)) ([7be8bc8](https://github.com/unraid/api/commit/7be8bc84d3831f9cea7ff62d0964612ad366a976))
|
||||
* console hidden ([9b85e00](https://github.com/unraid/api/commit/9b85e009b833706294a841a54498e45a8e0204ed))
|
||||
* debounce is too long ([#1426](https://github.com/unraid/api/issues/1426)) ([f12d231](https://github.com/unraid/api/commit/f12d231e6376d0f253cee67b7ed690c432c63ec5))
|
||||
* delete legacy connect keys and ensure description ([22fe91c](https://github.com/unraid/api/commit/22fe91cd561e88aa24e8f8cfa5a6143e7644e4e0))
|
||||
* **deps:** pin dependencies ([#1465](https://github.com/unraid/api/issues/1465)) ([ba75a40](https://github.com/unraid/api/commit/ba75a409a4d3e820308b78fd5a5380021d3757b0))
|
||||
* **deps:** pin dependencies ([#1470](https://github.com/unraid/api/issues/1470)) ([412b329](https://github.com/unraid/api/commit/412b32996d9c8352c25309cc0d549a57468d0fb5))
|
||||
* **deps:** storybook v9 ([#1476](https://github.com/unraid/api/issues/1476)) ([45bb49b](https://github.com/unraid/api/commit/45bb49bcd60a9753be492203111e489fd37c1a5f))
|
||||
* **deps:** update all non-major dependencies ([#1366](https://github.com/unraid/api/issues/1366)) ([291ee47](https://github.com/unraid/api/commit/291ee475fb9ef44f6da7b76a9eb11b7dd29a5d13))
|
||||
* **deps:** update all non-major dependencies ([#1379](https://github.com/unraid/api/issues/1379)) ([8f70326](https://github.com/unraid/api/commit/8f70326d0fe3e4c3bcd3e8e4e6566766f1c05eb7))
|
||||
* **deps:** update all non-major dependencies ([#1389](https://github.com/unraid/api/issues/1389)) ([cb43f95](https://github.com/unraid/api/commit/cb43f95233590888a8e20a130e62cadc176c6793))
|
||||
* **deps:** update all non-major dependencies ([#1399](https://github.com/unraid/api/issues/1399)) ([68df344](https://github.com/unraid/api/commit/68df344a4b412227cffa96867f086177b251f028))
|
||||
* **deps:** update dependency @types/diff to v8 ([#1393](https://github.com/unraid/api/issues/1393)) ([00da27d](https://github.com/unraid/api/commit/00da27d04f2ee2ca8b8b9cdcc6ea3c490c02a3a4))
|
||||
* **deps:** update dependency cache-manager to v7 ([#1413](https://github.com/unraid/api/issues/1413)) ([9492c2a](https://github.com/unraid/api/commit/9492c2ae6a0086d14e73d280c55746206b73a7b0))
|
||||
* **deps:** update dependency commander to v14 ([#1394](https://github.com/unraid/api/issues/1394)) ([106ea09](https://github.com/unraid/api/commit/106ea093996f2d0c71c1511bc009ecc9a6be91ec))
|
||||
* **deps:** update dependency diff to v8 ([#1386](https://github.com/unraid/api/issues/1386)) ([e580f64](https://github.com/unraid/api/commit/e580f646a52b8bda605132cf44ec58137e08dd42))
|
||||
* **deps:** update dependency dotenv to v17 ([#1474](https://github.com/unraid/api/issues/1474)) ([d613bfa](https://github.com/unraid/api/commit/d613bfa0410e7ef8451fc8ea20e57a7db67f7994))
|
||||
* **deps:** update dependency lucide-vue-next to ^0.509.0 ([#1383](https://github.com/unraid/api/issues/1383)) ([469333a](https://github.com/unraid/api/commit/469333acd4a0cbeecc9e9cbadb2884289d83aee3))
|
||||
* **deps:** update dependency marked to v16 ([#1444](https://github.com/unraid/api/issues/1444)) ([453a5b2](https://github.com/unraid/api/commit/453a5b2c9591f755ce07548a9996d7a6cf0925c4))
|
||||
* **deps:** update dependency shadcn-vue to v2 ([#1302](https://github.com/unraid/api/issues/1302)) ([26ecf77](https://github.com/unraid/api/commit/26ecf779e675d0bc533d61e045325ab062effcbf))
|
||||
* **deps:** update dependency vue-sonner to v2 ([#1401](https://github.com/unraid/api/issues/1401)) ([53ca414](https://github.com/unraid/api/commit/53ca41404f13c057c340dcf9010af72c3365e499))
|
||||
* disable file changes on Unraid 7.2 ([#1382](https://github.com/unraid/api/issues/1382)) ([02de89d](https://github.com/unraid/api/commit/02de89d1309f67e4b6d4f8de5f66815ee4d2464c))
|
||||
* do not start API with doinst.sh ([7d88b33](https://github.com/unraid/api/commit/7d88b3393cbd8ab1e93a86dfa1b7b74cc97255cc))
|
||||
* do not uninstall fully on 7.2 ([#1484](https://github.com/unraid/api/issues/1484)) ([2263881](https://github.com/unraid/api/commit/22638811a9fdb524420b1347ac49cfaa51bbecb5))
|
||||
* drop console with terser ([a87d455](https://github.com/unraid/api/commit/a87d455bace04aab9d7fa0e63cb61d26ef9b3b72))
|
||||
* error logs from `cloud` query when connect is not installed ([#1450](https://github.com/unraid/api/issues/1450)) ([719f460](https://github.com/unraid/api/commit/719f460016d769255582742d7d71ca97d132022b))
|
||||
* flash backup integration with Unraid Connect config ([#1448](https://github.com/unraid/api/issues/1448)) ([038c582](https://github.com/unraid/api/commit/038c582aed5f5efaea3583372778b9baa318e1ea))
|
||||
* header padding regression ([#1477](https://github.com/unraid/api/issues/1477)) ([e791cc6](https://github.com/unraid/api/commit/e791cc680de9c40378043348ddca70902da6d250))
|
||||
* incorrect state merging in redux store ([#1437](https://github.com/unraid/api/issues/1437)) ([17b7428](https://github.com/unraid/api/commit/17b74287796e6feb75466033e279dc3bcf57f1e6))
|
||||
* lanip copy button not present ([#1459](https://github.com/unraid/api/issues/1459)) ([a280786](https://github.com/unraid/api/commit/a2807864acef742e454d87bb093ee91806e527e5))
|
||||
* move to bigint scalar ([b625227](https://github.com/unraid/api/commit/b625227913e80e4731a13b54b525ec7385918c51))
|
||||
* node_modules dir removed on plugin update ([#1406](https://github.com/unraid/api/issues/1406)) ([7b005cb](https://github.com/unraid/api/commit/7b005cbbf682a1336641f5fc85022e9d651569d0))
|
||||
* omit Connect actions in UPC when plugin is not installed ([#1417](https://github.com/unraid/api/issues/1417)) ([8c8a527](https://github.com/unraid/api/commit/8c8a5276b49833c08bca133e374e1e66273b41aa))
|
||||
* parsing of `ssoEnabled` in state.php ([#1455](https://github.com/unraid/api/issues/1455)) ([f542c8e](https://github.com/unraid/api/commit/f542c8e0bd9596d9d3abf75b58b97d95fb033215))
|
||||
* pin ranges ([#1460](https://github.com/unraid/api/issues/1460)) ([f88400e](https://github.com/unraid/api/commit/f88400eea820ac80c867fdb63cd503ed91493146))
|
||||
* pr plugin promotion workflow ([#1456](https://github.com/unraid/api/issues/1456)) ([13bd9bb](https://github.com/unraid/api/commit/13bd9bb5670bb96b158068114d62572d88c7cae9))
|
||||
* proper fallback if missing paths config modules ([7067e9e](https://github.com/unraid/api/commit/7067e9e3dd3966309013b52c90090cc82de4e4fb))
|
||||
* rc.unraid-api now cleans up older dependencies ([#1404](https://github.com/unraid/api/issues/1404)) ([83076bb](https://github.com/unraid/api/commit/83076bb94088095de8b1a332a50bbef91421f0c1))
|
||||
* remote access lifecycle during boot & shutdown ([#1422](https://github.com/unraid/api/issues/1422)) ([7bc583b](https://github.com/unraid/api/commit/7bc583b18621c8140232772ca36c6d9b8d8a9cd7))
|
||||
* sign out correctly on error ([#1452](https://github.com/unraid/api/issues/1452)) ([d08fc94](https://github.com/unraid/api/commit/d08fc94afb94e386907da44402ee5a24cfb3d00a))
|
||||
* simplify usb listing ([#1402](https://github.com/unraid/api/issues/1402)) ([5355115](https://github.com/unraid/api/commit/5355115af2f4122af9afa3f63ed8f830b33cbf5c))
|
||||
* theme issues when sent from graph ([#1424](https://github.com/unraid/api/issues/1424)) ([75ad838](https://github.com/unraid/api/commit/75ad8381bd4f4045ab1d3aa84e08ecddfba27617))
|
||||
* **ui:** notifications positioning regression ([#1445](https://github.com/unraid/api/issues/1445)) ([f73e5e0](https://github.com/unraid/api/commit/f73e5e0058fcc3bedebfbe7380ffcb44aea981b8))
|
||||
* use some instead of every for connect detection ([9ce2fee](https://github.com/unraid/api/commit/9ce2fee380c4db1395f5d4df7f16ae6c57d1a748))
|
||||
|
||||
|
||||
### Reverts
|
||||
|
||||
* revert package.json dependency updates from commit 711cc9a for api and packages/* ([94420e4](https://github.com/unraid/api/commit/94420e4d45735b8def3915b5789c15c1c3121f1e))
|
||||
|
||||
## [4.8.0](https://github.com/unraid/api/compare/v4.7.0...v4.8.0) (2025-05-01)
|
||||
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
###########################################################
|
||||
# Development/Build Image
|
||||
###########################################################
|
||||
FROM node:22-bookworm-slim AS development
|
||||
FROM node:22.17.1-bookworm-slim AS development
|
||||
|
||||
# Install build tools and dependencies
|
||||
RUN apt-get update -y && apt-get install -y \
|
||||
|
||||
@@ -27,19 +27,13 @@ const config: CodegenConfig = {
|
||||
},
|
||||
},
|
||||
generates: {
|
||||
// Generate Types for Mothership GraphQL Client
|
||||
'src/graphql/generated/client/': {
|
||||
documents: './src/graphql/mothership/*.ts',
|
||||
schema: {
|
||||
[process.env.MOTHERSHIP_GRAPHQL_LINK as string]: {
|
||||
headers: {
|
||||
origin: 'https://forums.unraid.net',
|
||||
},
|
||||
},
|
||||
},
|
||||
// Generate Types for CLI Internal GraphQL Queries
|
||||
'src/unraid-api/cli/generated/': {
|
||||
documents: ['src/unraid-api/cli/queries/**/*.ts', 'src/unraid-api/cli/mutations/**/*.ts'],
|
||||
schema: './generated-schema.graphql',
|
||||
preset: 'client',
|
||||
presetConfig: {
|
||||
gqlTagName: 'graphql',
|
||||
gqlTagName: 'gql',
|
||||
},
|
||||
config: {
|
||||
useTypeImports: true,
|
||||
@@ -47,21 +41,6 @@ const config: CodegenConfig = {
|
||||
},
|
||||
plugins: [{ add: { content: '/* eslint-disable */' } }],
|
||||
},
|
||||
'src/graphql/generated/client/validators.ts': {
|
||||
schema: {
|
||||
[process.env.MOTHERSHIP_GRAPHQL_LINK as string]: {
|
||||
headers: {
|
||||
origin: 'https://forums.unraid.net',
|
||||
},
|
||||
},
|
||||
},
|
||||
plugins: ['typescript-validation-schema', { add: { content: '/* eslint-disable */' } }],
|
||||
config: {
|
||||
importFrom: '@app/graphql/generated/client/graphql.js',
|
||||
strictScalars: false,
|
||||
schema: 'zod',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
[api]
|
||||
version="4.7.0"
|
||||
version="4.4.1"
|
||||
extraOrigins="https://google.com,https://test.com"
|
||||
[local]
|
||||
sandbox="yes"
|
||||
|
||||
7
api/dev/configs/api.json
Normal file
7
api/dev/configs/api.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"version": "4.11.0",
|
||||
"extraOrigins": [],
|
||||
"sandbox": false,
|
||||
"ssoSubIds": [],
|
||||
"plugins": []
|
||||
}
|
||||
@@ -1,3 +1,12 @@
|
||||
{
|
||||
"demo": "hello.unraider"
|
||||
"wanaccess": true,
|
||||
"wanport": 8443,
|
||||
"upnpEnabled": false,
|
||||
"apikey": "_______________________BIG_API_KEY_HERE_________________________",
|
||||
"localApiKey": "_______________________LOCAL_API_KEY_HERE_________________________",
|
||||
"email": "test@example.com",
|
||||
"username": "zspearmint",
|
||||
"avatar": "https://via.placeholder.com/200",
|
||||
"regWizTime": "1611175408732_0951-1653-3509-FBA155FA23C0",
|
||||
"dynamicRemoteAccessType": "DISABLED"
|
||||
}
|
||||
11
api/dev/keys/fc91da7b-0284-46f4-9018-55aa9759fba9.json
Normal file
11
api/dev/keys/fc91da7b-0284-46f4-9018-55aa9759fba9.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"createdAt": "2025-07-23T17:34:06.301Z",
|
||||
"description": "Internal admin API key used by CLI commands for system operations",
|
||||
"id": "fc91da7b-0284-46f4-9018-55aa9759fba9",
|
||||
"key": "_______SUPER_SECRET_KEY_______",
|
||||
"name": "CliInternal",
|
||||
"permissions": [],
|
||||
"roles": [
|
||||
"ADMIN"
|
||||
]
|
||||
}
|
||||
1
api/dev/log/.gitkeep
Normal file
1
api/dev/log/.gitkeep
Normal file
@@ -0,0 +1 @@
|
||||
# custom log directory for tests & development
|
||||
@@ -1,5 +1,5 @@
|
||||
[api]
|
||||
version="4.7.0"
|
||||
version="4.4.1"
|
||||
extraOrigins="https://google.com,https://test.com"
|
||||
[local]
|
||||
sandbox="yes"
|
||||
@@ -20,5 +20,5 @@ dynamicRemoteAccessType="DISABLED"
|
||||
ssoSubIds=""
|
||||
allowedOrigins="/var/run/unraid-notifications.sock, /var/run/unraid-php.sock, /var/run/unraid-cli.sock, http://localhost:8080, https://localhost:4443, https://tower.local:4443, https://192.168.1.150:4443, https://tower:4443, https://192-168-1-150.thisisfourtyrandomcharacters012345678900.myunraid.net:4443, https://85-121-123-122.thisisfourtyrandomcharacters012345678900.myunraid.net:8443, https://10-252-0-1.hash.myunraid.net:4443, https://10-252-1-1.hash.myunraid.net:4443, https://10-253-3-1.hash.myunraid.net:4443, https://10-253-4-1.hash.myunraid.net:4443, https://10-253-5-1.hash.myunraid.net:4443, https://10-100-0-1.hash.myunraid.net:4443, https://10-100-0-2.hash.myunraid.net:4443, https://10-123-1-2.hash.myunraid.net:4443, https://221-123-121-112.hash.myunraid.net:4443, https://google.com, https://test.com, https://connect.myunraid.net, https://connect-staging.myunraid.net, https://dev-my.myunraid.net:4000, https://studio.apollographql.com"
|
||||
[connectionStatus]
|
||||
minigraph="ERROR_RETRYING"
|
||||
minigraph="PRE_INIT"
|
||||
upnpStatus=""
|
||||
|
||||
@@ -10,22 +10,115 @@ where the API provides dependencies for the plugin while the plugin provides fun
|
||||
### Adding a local workspace package as an API plugin
|
||||
|
||||
The challenge with local workspace plugins is that they aren't available via npm during production.
|
||||
To solve this, we vendor them inside `dist/plugins`. To prevent the build from breaking, however,
|
||||
you should mark the workspace dependency as optional. For example:
|
||||
To solve this, we vendor them during the build process. Here's the complete process:
|
||||
|
||||
#### 1. Configure the build system
|
||||
|
||||
Add your workspace package to the vendoring configuration in `api/scripts/build.ts`:
|
||||
|
||||
```typescript
|
||||
const WORKSPACE_PACKAGES_TO_VENDOR = {
|
||||
'@unraid/shared': 'packages/unraid-shared',
|
||||
'unraid-api-plugin-connect': 'packages/unraid-api-plugin-connect',
|
||||
'your-plugin-name': 'packages/your-plugin-path', // Add your plugin here
|
||||
} as const;
|
||||
```
|
||||
|
||||
#### 2. Configure Vite
|
||||
|
||||
Add your workspace package to the Vite configuration in `api/vite.config.ts`:
|
||||
|
||||
```typescript
|
||||
const workspaceDependencies = {
|
||||
'@unraid/shared': 'packages/unraid-shared',
|
||||
'unraid-api-plugin-connect': 'packages/unraid-api-plugin-connect',
|
||||
'your-plugin-name': 'packages/your-plugin-path', // Add your plugin here
|
||||
};
|
||||
```
|
||||
|
||||
This ensures the package is:
|
||||
- Excluded from Vite's optimization during development
|
||||
- Marked as external during the build process
|
||||
- Properly handled in SSR mode
|
||||
|
||||
#### 3. Configure the API package.json
|
||||
|
||||
Add your workspace package as a peer dependency in `api/package.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"peerDependencies": {
|
||||
"unraid-api-plugin-connect": "workspace:*"
|
||||
"unraid-api-plugin-connect": "workspace:*",
|
||||
"your-plugin-name": "workspace:*"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"unraid-api-plugin-connect": {
|
||||
"optional": true
|
||||
},
|
||||
"your-plugin-name": {
|
||||
"optional": true
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
By marking the workspace dependency "optional", npm will not attempt to install it.
|
||||
Thus, even though the "workspace:*" identifier will be invalid during build-time and run-time,
|
||||
it will not cause problems.
|
||||
By marking the workspace dependency "optional", npm will not attempt to install it during development.
|
||||
The "workspace:*" identifier will be invalid during build-time and run-time, but won't cause problems
|
||||
because the package gets vendored instead.
|
||||
|
||||
#### 4. Plugin package setup
|
||||
|
||||
Your workspace plugin package should:
|
||||
|
||||
1. **Export types and main entry**: Set up proper `main`, `types`, and `exports` fields:
|
||||
```json
|
||||
{
|
||||
"name": "your-plugin-name",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
".": {
|
||||
"types": "./dist/index.d.ts",
|
||||
"import": "./dist/index.js"
|
||||
}
|
||||
},
|
||||
"files": ["dist"]
|
||||
}
|
||||
```
|
||||
|
||||
2. **Use peer dependencies**: Declare shared dependencies as peer dependencies to avoid duplication:
|
||||
```json
|
||||
{
|
||||
"peerDependencies": {
|
||||
"@nestjs/common": "^11.0.11",
|
||||
"@nestjs/core": "^11.0.11",
|
||||
"graphql": "^16.9.0"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
3. **Include build script**: Add a build script that compiles TypeScript:
|
||||
```json
|
||||
{
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"prepare": "npm run build"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### 5. Build process
|
||||
|
||||
During production builds:
|
||||
|
||||
1. The build script (`api/scripts/build.ts`) will automatically pack and install your workspace package as a tarball
|
||||
2. This happens after `npm install --omit=dev` in the pack directory
|
||||
3. The vendored package becomes a regular node_modules dependency in the final build
|
||||
|
||||
#### 6. Development vs Production
|
||||
|
||||
- **Development**: Vite resolves workspace packages directly from their source
|
||||
- **Production**: Packages are vendored as tarballs in `node_modules`
|
||||
|
||||
This approach ensures that workspace plugins work seamlessly in both development and production environments.
|
||||
|
||||
@@ -62,10 +62,17 @@ Switch between production and staging environments.
|
||||
### Developer Mode
|
||||
|
||||
```bash
|
||||
unraid-api developer
|
||||
unraid-api developer # Interactive prompt for tools
|
||||
unraid-api developer --sandbox true # Enable GraphQL sandbox
|
||||
unraid-api developer --sandbox false # Disable GraphQL sandbox
|
||||
unraid-api developer --enable-modal # Enable modal testing tool
|
||||
unraid-api developer --disable-modal # Disable modal testing tool
|
||||
```
|
||||
|
||||
Configure developer features for the API (e.g., GraphQL sandbox).
|
||||
Configure developer features for the API:
|
||||
|
||||
- **GraphQL Sandbox**: Enable/disable Apollo GraphQL sandbox at `/graphql`
|
||||
- **Modal Testing Tool**: Enable/disable UI modal testing in the Unraid menu
|
||||
|
||||
## API Key Management
|
||||
|
||||
|
||||
@@ -4,13 +4,19 @@ The Unraid API provides a GraphQL interface that allows you to interact with you
|
||||
|
||||
## Enabling the GraphQL Sandbox
|
||||
|
||||
1. First, enable developer mode using the CLI:
|
||||
1. Enable developer mode using the CLI:
|
||||
|
||||
```bash
|
||||
unraid-api developer --sandbox true
|
||||
```
|
||||
|
||||
Or use the interactive mode:
|
||||
|
||||
```bash
|
||||
unraid-api developer
|
||||
```
|
||||
|
||||
2. Follow the prompts to enable the sandbox. This will allow you to access the Apollo Sandbox interface.
|
||||
2. Once enabled, you can access the Apollo Sandbox interface
|
||||
|
||||
3. Access the GraphQL playground by navigating to:
|
||||
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -14,46 +14,6 @@ directive @usePermissions(
|
||||
possession: AuthPossession
|
||||
) on FIELD_DEFINITION
|
||||
|
||||
type ApiKeyResponse {
|
||||
valid: Boolean!
|
||||
error: String
|
||||
}
|
||||
|
||||
type MinigraphqlResponse {
|
||||
status: MinigraphStatus!
|
||||
timeout: Int
|
||||
error: String
|
||||
}
|
||||
|
||||
enum MinigraphStatus {
|
||||
PRE_INIT
|
||||
CONNECTING
|
||||
CONNECTED
|
||||
PING_FAILURE
|
||||
ERROR_RETRYING
|
||||
}
|
||||
|
||||
type CloudResponse {
|
||||
status: String!
|
||||
ip: String
|
||||
error: String
|
||||
}
|
||||
|
||||
type RelayResponse {
|
||||
status: String!
|
||||
timeout: String
|
||||
error: String
|
||||
}
|
||||
|
||||
type Cloud {
|
||||
error: String
|
||||
apiKey: ApiKeyResponse!
|
||||
relay: RelayResponse
|
||||
minigraphql: MinigraphqlResponse!
|
||||
cloud: CloudResponse!
|
||||
allowedOrigins: [String!]!
|
||||
}
|
||||
|
||||
type Capacity {
|
||||
"""Free capacity"""
|
||||
free: String!
|
||||
@@ -84,7 +44,7 @@ type ArrayDisk implements Node {
|
||||
device: String
|
||||
|
||||
"""(KB) Disk Size total"""
|
||||
size: Long
|
||||
size: BigInt
|
||||
status: ArrayDiskStatus
|
||||
|
||||
"""Is the disk a HDD or SSD."""
|
||||
@@ -96,26 +56,26 @@ type ArrayDisk implements Node {
|
||||
"""
|
||||
Count of I/O read requests sent to the device I/O drivers. These statistics may be cleared at any time.
|
||||
"""
|
||||
numReads: Long
|
||||
numReads: BigInt
|
||||
|
||||
"""
|
||||
Count of I/O writes requests sent to the device I/O drivers. These statistics may be cleared at any time.
|
||||
"""
|
||||
numWrites: Long
|
||||
numWrites: BigInt
|
||||
|
||||
"""
|
||||
Number of unrecoverable errors reported by the device I/O drivers. Missing data due to unrecoverable array read errors is filled in on-the-fly using parity reconstruct (and we attempt to write this data back to the sector(s) which failed). Any unrecoverable write error results in disabling the disk.
|
||||
"""
|
||||
numErrors: Long
|
||||
numErrors: BigInt
|
||||
|
||||
"""(KB) Total Size of the FS (Not present on Parity type drive)"""
|
||||
fsSize: Long
|
||||
fsSize: BigInt
|
||||
|
||||
"""(KB) Free Size on the FS (Not present on Parity type drive)"""
|
||||
fsFree: Long
|
||||
fsFree: BigInt
|
||||
|
||||
"""(KB) Used Size on the FS (Not present on Parity type drive)"""
|
||||
fsUsed: Long
|
||||
fsUsed: BigInt
|
||||
exportable: Boolean
|
||||
|
||||
"""Type of Disk - used to differentiate Cache / Flash / Array / Parity"""
|
||||
@@ -145,8 +105,10 @@ interface Node {
|
||||
id: PrefixedID!
|
||||
}
|
||||
|
||||
"""The `Long` scalar type represents 52-bit integers"""
|
||||
scalar Long
|
||||
"""
|
||||
The `BigInt` scalar type represents non-fractional signed whole numeric values.
|
||||
"""
|
||||
scalar BigInt
|
||||
|
||||
enum ArrayDiskStatus {
|
||||
DISK_NP
|
||||
@@ -222,13 +184,13 @@ type Share implements Node {
|
||||
name: String
|
||||
|
||||
"""(KB) Free space"""
|
||||
free: Long
|
||||
free: BigInt
|
||||
|
||||
"""(KB) Used Size"""
|
||||
used: Long
|
||||
used: BigInt
|
||||
|
||||
"""(KB) Total size"""
|
||||
size: Long
|
||||
size: BigInt
|
||||
|
||||
"""Disks that are included in this share"""
|
||||
include: [String!]
|
||||
@@ -264,147 +226,6 @@ type Share implements Node {
|
||||
luksStatus: String
|
||||
}
|
||||
|
||||
type AccessUrl {
|
||||
type: URL_TYPE!
|
||||
name: String
|
||||
ipv4: URL
|
||||
ipv6: URL
|
||||
}
|
||||
|
||||
enum URL_TYPE {
|
||||
LAN
|
||||
WIREGUARD
|
||||
WAN
|
||||
MDNS
|
||||
OTHER
|
||||
DEFAULT
|
||||
}
|
||||
|
||||
"""
|
||||
A field whose value conforms to the standard URL format as specified in RFC3986: https://www.ietf.org/rfc/rfc3986.txt.
|
||||
"""
|
||||
scalar URL
|
||||
|
||||
type RemoteAccess {
|
||||
"""The type of WAN access used for Remote Access"""
|
||||
accessType: WAN_ACCESS_TYPE!
|
||||
|
||||
"""The type of port forwarding used for Remote Access"""
|
||||
forwardType: WAN_FORWARD_TYPE
|
||||
|
||||
"""The port used for Remote Access"""
|
||||
port: Int
|
||||
}
|
||||
|
||||
enum WAN_ACCESS_TYPE {
|
||||
DYNAMIC
|
||||
ALWAYS
|
||||
DISABLED
|
||||
}
|
||||
|
||||
enum WAN_FORWARD_TYPE {
|
||||
UPNP
|
||||
STATIC
|
||||
}
|
||||
|
||||
type DynamicRemoteAccessStatus {
|
||||
"""The type of dynamic remote access that is enabled"""
|
||||
enabledType: DynamicRemoteAccessType!
|
||||
|
||||
"""The type of dynamic remote access that is currently running"""
|
||||
runningType: DynamicRemoteAccessType!
|
||||
|
||||
"""Any error message associated with the dynamic remote access"""
|
||||
error: String
|
||||
}
|
||||
|
||||
enum DynamicRemoteAccessType {
|
||||
STATIC
|
||||
UPNP
|
||||
DISABLED
|
||||
}
|
||||
|
||||
type ConnectSettingsValues {
|
||||
"""
|
||||
If true, the GraphQL sandbox is enabled and available at /graphql. If false, the GraphQL sandbox is disabled and only the production API will be available.
|
||||
"""
|
||||
sandbox: Boolean!
|
||||
|
||||
"""A list of origins allowed to interact with the API"""
|
||||
extraOrigins: [String!]!
|
||||
|
||||
"""The type of WAN access used for Remote Access"""
|
||||
accessType: WAN_ACCESS_TYPE!
|
||||
|
||||
"""The type of port forwarding used for Remote Access"""
|
||||
forwardType: WAN_FORWARD_TYPE
|
||||
|
||||
"""The port used for Remote Access"""
|
||||
port: Int
|
||||
|
||||
"""A list of Unique Unraid Account ID's"""
|
||||
ssoUserIds: [String!]!
|
||||
}
|
||||
|
||||
type ConnectSettings implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""The data schema for the Connect settings"""
|
||||
dataSchema: JSON!
|
||||
|
||||
"""The UI schema for the Connect settings"""
|
||||
uiSchema: JSON!
|
||||
|
||||
"""The values for the Connect settings"""
|
||||
values: ConnectSettingsValues!
|
||||
}
|
||||
|
||||
"""
|
||||
The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf).
|
||||
"""
|
||||
scalar JSON @specifiedBy(url: "http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf")
|
||||
|
||||
type Connect implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""The status of dynamic remote access"""
|
||||
dynamicRemoteAccess: DynamicRemoteAccessStatus!
|
||||
|
||||
"""The settings for the Connect instance"""
|
||||
settings: ConnectSettings!
|
||||
}
|
||||
|
||||
type Network implements Node {
|
||||
id: PrefixedID!
|
||||
accessUrls: [AccessUrl!]
|
||||
}
|
||||
|
||||
type ProfileModel implements Node {
|
||||
id: PrefixedID!
|
||||
username: String!
|
||||
url: String!
|
||||
avatar: String!
|
||||
}
|
||||
|
||||
type Server implements Node {
|
||||
id: PrefixedID!
|
||||
owner: ProfileModel!
|
||||
guid: String!
|
||||
apikey: String!
|
||||
name: String!
|
||||
status: ServerStatus!
|
||||
wanip: String!
|
||||
lanip: String!
|
||||
localurl: String!
|
||||
remoteurl: String!
|
||||
}
|
||||
|
||||
enum ServerStatus {
|
||||
ONLINE
|
||||
OFFLINE
|
||||
NEVER_CONNECTED
|
||||
}
|
||||
|
||||
type DiskPartition {
|
||||
"""The name of the partition"""
|
||||
name: String!
|
||||
@@ -796,6 +617,7 @@ type ApiKey implements Node {
|
||||
"""Available roles for API keys and users"""
|
||||
enum Role {
|
||||
ADMIN
|
||||
USER
|
||||
CONNECT
|
||||
GUEST
|
||||
}
|
||||
@@ -810,6 +632,46 @@ type ApiKeyWithSecret implements Node {
|
||||
key: String!
|
||||
}
|
||||
|
||||
type RCloneDrive {
|
||||
"""Provider name"""
|
||||
name: String!
|
||||
|
||||
"""Provider options and configuration schema"""
|
||||
options: JSON!
|
||||
}
|
||||
|
||||
"""
|
||||
The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf).
|
||||
"""
|
||||
scalar JSON @specifiedBy(url: "http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf")
|
||||
|
||||
type RCloneBackupConfigForm {
|
||||
id: ID!
|
||||
dataSchema: JSON!
|
||||
uiSchema: JSON!
|
||||
}
|
||||
|
||||
type RCloneBackupSettings {
|
||||
configForm(formOptions: RCloneConfigFormInput): RCloneBackupConfigForm!
|
||||
drives: [RCloneDrive!]!
|
||||
remotes: [RCloneRemote!]!
|
||||
}
|
||||
|
||||
input RCloneConfigFormInput {
|
||||
providerType: String
|
||||
showAdvanced: Boolean = false
|
||||
parameters: JSON
|
||||
}
|
||||
|
||||
type RCloneRemote {
|
||||
name: String!
|
||||
type: String!
|
||||
parameters: JSON!
|
||||
|
||||
"""Complete remote configuration"""
|
||||
config: JSON!
|
||||
}
|
||||
|
||||
type ArrayMutations {
|
||||
"""Set array state"""
|
||||
setState(input: ArrayStateInput!): UnraidArray!
|
||||
@@ -881,6 +743,63 @@ type VmMutations {
|
||||
reset(id: PrefixedID!): Boolean!
|
||||
}
|
||||
|
||||
"""API Key related mutations"""
|
||||
type ApiKeyMutations {
|
||||
"""Create an API key"""
|
||||
create(input: CreateApiKeyInput!): ApiKeyWithSecret!
|
||||
|
||||
"""Add a role to an API key"""
|
||||
addRole(input: AddRoleForApiKeyInput!): Boolean!
|
||||
|
||||
"""Remove a role from an API key"""
|
||||
removeRole(input: RemoveRoleFromApiKeyInput!): Boolean!
|
||||
|
||||
"""Delete one or more API keys"""
|
||||
delete(input: DeleteApiKeyInput!): Boolean!
|
||||
|
||||
"""Update an API key"""
|
||||
update(input: UpdateApiKeyInput!): ApiKeyWithSecret!
|
||||
}
|
||||
|
||||
input CreateApiKeyInput {
|
||||
name: String!
|
||||
description: String
|
||||
roles: [Role!]
|
||||
permissions: [AddPermissionInput!]
|
||||
|
||||
"""
|
||||
This will replace the existing key if one already exists with the same name, otherwise returns the existing key
|
||||
"""
|
||||
overwrite: Boolean
|
||||
}
|
||||
|
||||
input AddPermissionInput {
|
||||
resource: Resource!
|
||||
actions: [String!]!
|
||||
}
|
||||
|
||||
input AddRoleForApiKeyInput {
|
||||
apiKeyId: PrefixedID!
|
||||
role: Role!
|
||||
}
|
||||
|
||||
input RemoveRoleFromApiKeyInput {
|
||||
apiKeyId: PrefixedID!
|
||||
role: Role!
|
||||
}
|
||||
|
||||
input DeleteApiKeyInput {
|
||||
ids: [PrefixedID!]!
|
||||
}
|
||||
|
||||
input UpdateApiKeyInput {
|
||||
id: PrefixedID!
|
||||
name: String
|
||||
description: String
|
||||
roles: [Role!]
|
||||
permissions: [AddPermissionInput!]
|
||||
}
|
||||
|
||||
"""
|
||||
Parity check related mutations, WIP, response types and functionaliy will change
|
||||
"""
|
||||
@@ -898,6 +817,25 @@ type ParityCheckMutations {
|
||||
cancel: JSON!
|
||||
}
|
||||
|
||||
"""RClone related mutations"""
|
||||
type RCloneMutations {
|
||||
"""Create a new RClone remote"""
|
||||
createRCloneRemote(input: CreateRCloneRemoteInput!): RCloneRemote!
|
||||
|
||||
"""Delete an existing RClone remote"""
|
||||
deleteRCloneRemote(input: DeleteRCloneRemoteInput!): Boolean!
|
||||
}
|
||||
|
||||
input CreateRCloneRemoteInput {
|
||||
name: String!
|
||||
type: String!
|
||||
parameters: JSON!
|
||||
}
|
||||
|
||||
input DeleteRCloneRemoteInput {
|
||||
name: String!
|
||||
}
|
||||
|
||||
type ParityCheck {
|
||||
"""Date of the parity check"""
|
||||
date: DateTime
|
||||
@@ -981,14 +919,14 @@ type Theme {
|
||||
"""Whether to show the banner gradient"""
|
||||
showBannerGradient: Boolean!
|
||||
|
||||
"""The background color of the header"""
|
||||
headerBackgroundColor: String!
|
||||
|
||||
"""Whether to show the description in the header"""
|
||||
showHeaderDescription: Boolean!
|
||||
|
||||
"""The background color of the header"""
|
||||
headerBackgroundColor: String
|
||||
|
||||
"""The text color of the header"""
|
||||
headerPrimaryTextColor: String!
|
||||
headerPrimaryTextColor: String
|
||||
|
||||
"""The secondary text color of the header"""
|
||||
headerSecondaryTextColor: String
|
||||
@@ -1116,7 +1054,7 @@ enum Temperature {
|
||||
|
||||
type MemoryLayout implements Node {
|
||||
id: PrefixedID!
|
||||
size: Int!
|
||||
size: BigInt!
|
||||
bank: String
|
||||
type: String
|
||||
clockSpeed: Int
|
||||
@@ -1131,16 +1069,16 @@ type MemoryLayout implements Node {
|
||||
|
||||
type InfoMemory implements Node {
|
||||
id: PrefixedID!
|
||||
max: Int!
|
||||
total: Int!
|
||||
free: Int!
|
||||
used: Int!
|
||||
active: Int!
|
||||
available: Int!
|
||||
buffcache: Int!
|
||||
swaptotal: Int!
|
||||
swapused: Int!
|
||||
swapfree: Int!
|
||||
max: BigInt!
|
||||
total: BigInt!
|
||||
free: BigInt!
|
||||
used: BigInt!
|
||||
active: BigInt!
|
||||
available: BigInt!
|
||||
buffcache: BigInt!
|
||||
swaptotal: BigInt!
|
||||
swapused: BigInt!
|
||||
swapfree: BigInt!
|
||||
layout: [MemoryLayout!]!
|
||||
}
|
||||
|
||||
@@ -1251,20 +1189,15 @@ type DockerContainer implements Node {
|
||||
|
||||
"""Total size of all the files in the container"""
|
||||
sizeRootFs: Int
|
||||
labels: JSONObject
|
||||
labels: JSON
|
||||
state: ContainerState!
|
||||
status: String!
|
||||
hostConfig: ContainerHostConfig
|
||||
networkSettings: JSONObject
|
||||
mounts: [JSONObject!]
|
||||
networkSettings: JSON
|
||||
mounts: [JSON!]
|
||||
autoStart: Boolean!
|
||||
}
|
||||
|
||||
"""
|
||||
The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf).
|
||||
"""
|
||||
scalar JSONObject @specifiedBy(url: "http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf")
|
||||
|
||||
enum ContainerState {
|
||||
RUNNING
|
||||
EXITED
|
||||
@@ -1277,15 +1210,15 @@ type DockerNetwork implements Node {
|
||||
scope: String!
|
||||
driver: String!
|
||||
enableIPv6: Boolean!
|
||||
ipam: JSONObject!
|
||||
ipam: JSON!
|
||||
internal: Boolean!
|
||||
attachable: Boolean!
|
||||
ingress: Boolean!
|
||||
configFrom: JSONObject!
|
||||
configFrom: JSON!
|
||||
configOnly: Boolean!
|
||||
containers: JSONObject!
|
||||
options: JSONObject!
|
||||
labels: JSONObject!
|
||||
containers: JSON!
|
||||
options: JSON!
|
||||
labels: JSON!
|
||||
}
|
||||
|
||||
type Docker implements Node {
|
||||
@@ -1294,6 +1227,14 @@ type Docker implements Node {
|
||||
networks(skipCache: Boolean! = false): [DockerNetwork!]!
|
||||
}
|
||||
|
||||
type FlashBackupStatus {
|
||||
"""Status message indicating the outcome of the backup initiation."""
|
||||
status: String!
|
||||
|
||||
"""Job ID if available, can be used to check job status."""
|
||||
jobId: String
|
||||
}
|
||||
|
||||
type Flash implements Node {
|
||||
id: PrefixedID!
|
||||
guid: String!
|
||||
@@ -1389,6 +1330,71 @@ type Owner {
|
||||
avatar: String!
|
||||
}
|
||||
|
||||
type ProfileModel implements Node {
|
||||
id: PrefixedID!
|
||||
username: String!
|
||||
url: String!
|
||||
avatar: String!
|
||||
}
|
||||
|
||||
type Server implements Node {
|
||||
id: PrefixedID!
|
||||
owner: ProfileModel!
|
||||
guid: String!
|
||||
apikey: String!
|
||||
name: String!
|
||||
status: ServerStatus!
|
||||
wanip: String!
|
||||
lanip: String!
|
||||
localurl: String!
|
||||
remoteurl: String!
|
||||
}
|
||||
|
||||
enum ServerStatus {
|
||||
ONLINE
|
||||
OFFLINE
|
||||
NEVER_CONNECTED
|
||||
}
|
||||
|
||||
type ApiConfig {
|
||||
version: String!
|
||||
extraOrigins: [String!]!
|
||||
sandbox: Boolean
|
||||
ssoSubIds: [String!]!
|
||||
plugins: [String!]!
|
||||
}
|
||||
|
||||
type UnifiedSettings implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""The data schema for the settings"""
|
||||
dataSchema: JSON!
|
||||
|
||||
"""The UI schema for the settings"""
|
||||
uiSchema: JSON!
|
||||
|
||||
"""The current values of the settings"""
|
||||
values: JSON!
|
||||
}
|
||||
|
||||
type UpdateSettingsResponse {
|
||||
"""Whether a restart is required for the changes to take effect"""
|
||||
restartRequired: Boolean!
|
||||
|
||||
"""The updated settings values"""
|
||||
values: JSON!
|
||||
}
|
||||
|
||||
type Settings implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""A view of all settings"""
|
||||
unified: UnifiedSettings!
|
||||
|
||||
"""The API setting values"""
|
||||
api: ApiConfig!
|
||||
}
|
||||
|
||||
type VmDomain implements Node {
|
||||
"""The unique identifier for the vm (uuid)"""
|
||||
id: PrefixedID!
|
||||
@@ -1449,13 +1455,186 @@ type UserAccount implements Node {
|
||||
permissions: [Permission!]
|
||||
}
|
||||
|
||||
type Plugin {
|
||||
"""The name of the plugin package"""
|
||||
name: String!
|
||||
|
||||
"""The version of the plugin package"""
|
||||
version: String!
|
||||
|
||||
"""Whether the plugin has an API module"""
|
||||
hasApiModule: Boolean
|
||||
|
||||
"""Whether the plugin has a CLI module"""
|
||||
hasCliModule: Boolean
|
||||
}
|
||||
|
||||
type AccessUrl {
|
||||
type: URL_TYPE!
|
||||
name: String
|
||||
ipv4: URL
|
||||
ipv6: URL
|
||||
}
|
||||
|
||||
enum URL_TYPE {
|
||||
LAN
|
||||
WIREGUARD
|
||||
WAN
|
||||
MDNS
|
||||
OTHER
|
||||
DEFAULT
|
||||
}
|
||||
|
||||
"""
|
||||
A field whose value conforms to the standard URL format as specified in RFC3986: https://www.ietf.org/rfc/rfc3986.txt.
|
||||
"""
|
||||
scalar URL
|
||||
|
||||
type AccessUrlObject {
|
||||
ipv4: String
|
||||
ipv6: String
|
||||
type: URL_TYPE!
|
||||
name: String
|
||||
}
|
||||
|
||||
type ApiKeyResponse {
|
||||
valid: Boolean!
|
||||
error: String
|
||||
}
|
||||
|
||||
type MinigraphqlResponse {
|
||||
status: MinigraphStatus!
|
||||
timeout: Int
|
||||
error: String
|
||||
}
|
||||
|
||||
"""The status of the minigraph"""
|
||||
enum MinigraphStatus {
|
||||
PRE_INIT
|
||||
CONNECTING
|
||||
CONNECTED
|
||||
PING_FAILURE
|
||||
ERROR_RETRYING
|
||||
}
|
||||
|
||||
type CloudResponse {
|
||||
status: String!
|
||||
ip: String
|
||||
error: String
|
||||
}
|
||||
|
||||
type RelayResponse {
|
||||
status: String!
|
||||
timeout: String
|
||||
error: String
|
||||
}
|
||||
|
||||
type Cloud {
|
||||
error: String
|
||||
apiKey: ApiKeyResponse!
|
||||
relay: RelayResponse
|
||||
minigraphql: MinigraphqlResponse!
|
||||
cloud: CloudResponse!
|
||||
allowedOrigins: [String!]!
|
||||
}
|
||||
|
||||
type RemoteAccess {
|
||||
"""The type of WAN access used for Remote Access"""
|
||||
accessType: WAN_ACCESS_TYPE!
|
||||
|
||||
"""The type of port forwarding used for Remote Access"""
|
||||
forwardType: WAN_FORWARD_TYPE
|
||||
|
||||
"""The port used for Remote Access"""
|
||||
port: Int
|
||||
}
|
||||
|
||||
enum WAN_ACCESS_TYPE {
|
||||
DYNAMIC
|
||||
ALWAYS
|
||||
DISABLED
|
||||
}
|
||||
|
||||
enum WAN_FORWARD_TYPE {
|
||||
UPNP
|
||||
STATIC
|
||||
}
|
||||
|
||||
type DynamicRemoteAccessStatus {
|
||||
"""The type of dynamic remote access that is enabled"""
|
||||
enabledType: DynamicRemoteAccessType!
|
||||
|
||||
"""The type of dynamic remote access that is currently running"""
|
||||
runningType: DynamicRemoteAccessType!
|
||||
|
||||
"""Any error message associated with the dynamic remote access"""
|
||||
error: String
|
||||
}
|
||||
|
||||
enum DynamicRemoteAccessType {
|
||||
STATIC
|
||||
UPNP
|
||||
DISABLED
|
||||
}
|
||||
|
||||
type ConnectSettingsValues {
|
||||
"""The type of WAN access used for Remote Access"""
|
||||
accessType: WAN_ACCESS_TYPE!
|
||||
|
||||
"""The type of port forwarding used for Remote Access"""
|
||||
forwardType: WAN_FORWARD_TYPE
|
||||
|
||||
"""The port used for Remote Access"""
|
||||
port: Int
|
||||
}
|
||||
|
||||
type ConnectSettings implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""The data schema for the Connect settings"""
|
||||
dataSchema: JSON!
|
||||
|
||||
"""The UI schema for the Connect settings"""
|
||||
uiSchema: JSON!
|
||||
|
||||
"""The values for the Connect settings"""
|
||||
values: ConnectSettingsValues!
|
||||
}
|
||||
|
||||
type Connect implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""The status of dynamic remote access"""
|
||||
dynamicRemoteAccess: DynamicRemoteAccessStatus!
|
||||
|
||||
"""The settings for the Connect instance"""
|
||||
settings: ConnectSettings!
|
||||
}
|
||||
|
||||
type Network implements Node {
|
||||
id: PrefixedID!
|
||||
accessUrls: [AccessUrl!]
|
||||
}
|
||||
|
||||
input AccessUrlObjectInput {
|
||||
ipv4: String
|
||||
ipv6: String
|
||||
type: URL_TYPE!
|
||||
name: String
|
||||
}
|
||||
|
||||
"\n### Description:\n\nID scalar type that prefixes the underlying ID with the server identifier on output and strips it on input.\n\nWe use this scalar type to ensure that the ID is unique across all servers, allowing the same underlying resource ID to be used across different server instances.\n\n#### Input Behavior:\n\nWhen providing an ID as input (e.g., in arguments or input objects), the server identifier prefix ('<serverId>:') is optional.\n\n- If the prefix is present (e.g., '123:456'), it will be automatically stripped, and only the underlying ID ('456') will be used internally.\n- If the prefix is absent (e.g., '456'), the ID will be used as-is.\n\nThis makes it flexible for clients, as they don't strictly need to know or provide the server ID.\n\n#### Output Behavior:\n\nWhen an ID is returned in the response (output), it will *always* be prefixed with the current server's unique identifier (e.g., '123:456').\n\n#### Example:\n\nNote: The server identifier is '123' in this example.\n\n##### Input (Prefix Optional):\n```graphql\n# Both of these are valid inputs resolving to internal ID '456'\n{\n someQuery(id: \"123:456\") { ... }\n anotherQuery(id: \"456\") { ... }\n}\n```\n\n##### Output (Prefix Always Added):\n```graphql\n# Assuming internal ID is '456'\n{\n \"data\": {\n \"someResource\": {\n \"id\": \"123:456\" \n }\n }\n}\n```\n "
|
||||
scalar PrefixedID
|
||||
|
||||
type Query {
|
||||
apiKeys: [ApiKey!]!
|
||||
apiKey(id: PrefixedID!): ApiKey
|
||||
cloud: Cloud!
|
||||
|
||||
"""All possible roles for API keys"""
|
||||
apiKeyPossibleRoles: [Role!]!
|
||||
|
||||
"""All possible permissions for API keys"""
|
||||
apiKeyPossiblePermissions: [Permission!]!
|
||||
config: Config!
|
||||
display: Display!
|
||||
flash: Flash!
|
||||
@@ -1463,7 +1642,6 @@ type Query {
|
||||
logFiles: [LogFile!]!
|
||||
logFile(path: String!, lines: Int, startLine: Int): LogFileContent!
|
||||
me: UserAccount!
|
||||
network: Network!
|
||||
|
||||
"""Get all notifications"""
|
||||
notifications: Notifications!
|
||||
@@ -1475,29 +1653,31 @@ type Query {
|
||||
services: [Service!]!
|
||||
shares: [Share!]!
|
||||
vars: Vars!
|
||||
isInitialSetup: Boolean!
|
||||
|
||||
"""Get information about all VMs on the system"""
|
||||
vms: Vms!
|
||||
parityHistory: [ParityCheck!]!
|
||||
array: UnraidArray!
|
||||
connect: Connect!
|
||||
remoteAccess: RemoteAccess!
|
||||
extraAllowedOrigins: [String!]!
|
||||
customization: Customization
|
||||
publicPartnerInfo: PublicPartnerInfo
|
||||
publicTheme: Theme!
|
||||
docker: Docker!
|
||||
disks: [Disk!]!
|
||||
disk(id: PrefixedID!): Disk!
|
||||
health: String!
|
||||
getDemo: String!
|
||||
rclone: RCloneBackupSettings!
|
||||
settings: Settings!
|
||||
isSSOEnabled: Boolean!
|
||||
|
||||
"""List all installed plugins with their metadata"""
|
||||
plugins: [Plugin!]!
|
||||
remoteAccess: RemoteAccess!
|
||||
connect: Connect!
|
||||
network: Network!
|
||||
cloud: Cloud!
|
||||
}
|
||||
|
||||
type Mutation {
|
||||
createApiKey(input: CreateApiKeyInput!): ApiKeyWithSecret!
|
||||
addRoleForApiKey(input: AddRoleForApiKeyInput!): Boolean!
|
||||
removeRoleFromApiKey(input: RemoveRoleFromApiKeyInput!): Boolean!
|
||||
|
||||
"""Creates a new notification record"""
|
||||
createNotification(input: NotificationData!): Notification!
|
||||
deleteNotification(id: PrefixedID!, type: NotificationType!): NotificationOverview!
|
||||
@@ -1521,40 +1701,27 @@ type Mutation {
|
||||
docker: DockerMutations!
|
||||
vm: VmMutations!
|
||||
parityCheck: ParityCheckMutations!
|
||||
updateApiSettings(input: ApiSettingsInput!): ConnectSettingsValues!
|
||||
apiKey: ApiKeyMutations!
|
||||
rclone: RCloneMutations!
|
||||
|
||||
"""Initiates a flash drive backup using a configured remote."""
|
||||
initiateFlashBackup(input: InitiateFlashBackupInput!): FlashBackupStatus!
|
||||
updateSettings(input: JSON!): UpdateSettingsResponse!
|
||||
|
||||
"""
|
||||
Add one or more plugins to the API. Returns false if restart was triggered automatically, true if manual restart is required.
|
||||
"""
|
||||
addPlugin(input: PluginManagementInput!): Boolean!
|
||||
|
||||
"""
|
||||
Remove one or more plugins from the API. Returns false if restart was triggered automatically, true if manual restart is required.
|
||||
"""
|
||||
removePlugin(input: PluginManagementInput!): Boolean!
|
||||
updateApiSettings(input: ConnectSettingsInput!): ConnectSettingsValues!
|
||||
connectSignIn(input: ConnectSignInInput!): Boolean!
|
||||
connectSignOut: Boolean!
|
||||
setupRemoteAccess(input: SetupRemoteAccessInput!): Boolean!
|
||||
setAdditionalAllowedOrigins(input: AllowedOriginInput!): [String!]!
|
||||
enableDynamicRemoteAccess(input: EnableDynamicRemoteAccessInput!): Boolean!
|
||||
setDemo: String!
|
||||
}
|
||||
|
||||
input CreateApiKeyInput {
|
||||
name: String!
|
||||
description: String
|
||||
roles: [Role!]
|
||||
permissions: [AddPermissionInput!]
|
||||
|
||||
"""
|
||||
This will replace the existing key if one already exists with the same name, otherwise returns the existing key
|
||||
"""
|
||||
overwrite: Boolean
|
||||
}
|
||||
|
||||
input AddPermissionInput {
|
||||
resource: Resource!
|
||||
actions: [String!]!
|
||||
}
|
||||
|
||||
input AddRoleForApiKeyInput {
|
||||
apiKeyId: PrefixedID!
|
||||
role: Role!
|
||||
}
|
||||
|
||||
input RemoveRoleFromApiKeyInput {
|
||||
apiKeyId: PrefixedID!
|
||||
role: Role!
|
||||
}
|
||||
|
||||
input NotificationData {
|
||||
@@ -1565,15 +1732,38 @@ input NotificationData {
|
||||
link: String
|
||||
}
|
||||
|
||||
input ApiSettingsInput {
|
||||
"""
|
||||
If true, the GraphQL sandbox will be enabled and available at /graphql. If false, the GraphQL sandbox will be disabled and only the production API will be available.
|
||||
"""
|
||||
sandbox: Boolean
|
||||
input InitiateFlashBackupInput {
|
||||
"""The name of the remote configuration to use for the backup."""
|
||||
remoteName: String!
|
||||
|
||||
"""A list of origins allowed to interact with the API"""
|
||||
extraOrigins: [String!]
|
||||
"""Source path to backup (typically the flash drive)."""
|
||||
sourcePath: String!
|
||||
|
||||
"""Destination path on the remote."""
|
||||
destinationPath: String!
|
||||
|
||||
"""
|
||||
Additional options for the backup operation, such as --dry-run or --transfers.
|
||||
"""
|
||||
options: JSON
|
||||
}
|
||||
|
||||
input PluginManagementInput {
|
||||
"""Array of plugin package names to add or remove"""
|
||||
names: [String!]!
|
||||
|
||||
"""
|
||||
Whether to treat plugins as bundled plugins. Bundled plugins are installed to node_modules at build time and controlled via config only.
|
||||
"""
|
||||
bundled: Boolean! = false
|
||||
|
||||
"""
|
||||
Whether to restart the API after the operation. When false, a restart has already been queued.
|
||||
"""
|
||||
restart: Boolean! = true
|
||||
}
|
||||
|
||||
input ConnectSettingsInput {
|
||||
"""The type of WAN access to use for Remote Access"""
|
||||
accessType: WAN_ACCESS_TYPE
|
||||
|
||||
@@ -1584,26 +1774,14 @@ input ApiSettingsInput {
|
||||
The port to use for Remote Access. Not required for UPNP forwardType. Required for STATIC forwardType. Ignored if accessType is DISABLED or forwardType is UPNP.
|
||||
"""
|
||||
port: Int
|
||||
|
||||
"""A list of Unique Unraid Account ID's"""
|
||||
ssoUserIds: [String!]
|
||||
}
|
||||
|
||||
input ConnectSignInInput {
|
||||
"""The API key for authentication"""
|
||||
apiKey: String!
|
||||
|
||||
"""The ID token for authentication"""
|
||||
idToken: String
|
||||
|
||||
"""User information for the sign-in"""
|
||||
userInfo: ConnectUserInfoInput
|
||||
|
||||
"""The access token for authentication"""
|
||||
accessToken: String
|
||||
|
||||
"""The refresh token for authentication"""
|
||||
refreshToken: String
|
||||
}
|
||||
|
||||
input ConnectUserInfoInput {
|
||||
@@ -1630,11 +1808,6 @@ input SetupRemoteAccessInput {
|
||||
port: Int
|
||||
}
|
||||
|
||||
input AllowedOriginInput {
|
||||
"""A list of origins allowed to interact with the API"""
|
||||
origins: [String!]!
|
||||
}
|
||||
|
||||
input EnableDynamicRemoteAccessInput {
|
||||
"""The AccessURL Input for dynamic remote access"""
|
||||
url: AccessUrlInput!
|
||||
@@ -1657,7 +1830,6 @@ type Subscription {
|
||||
notificationAdded: Notification!
|
||||
notificationsOverview: NotificationOverview!
|
||||
ownerSubscription: Owner!
|
||||
registrationSubscription: Registration!
|
||||
serversSubscription: Server!
|
||||
parityHistorySubscription: ParityCheck!
|
||||
arraySubscription: UnraidArray!
|
||||
|
||||
322
api/package.json
322
api/package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@unraid/api",
|
||||
"version": "4.8.0",
|
||||
"version": "4.11.0",
|
||||
"main": "src/cli/index.ts",
|
||||
"type": "module",
|
||||
"corepack": {
|
||||
@@ -10,26 +10,26 @@
|
||||
"author": "Lime Technology, Inc. <unraid.net>",
|
||||
"license": "GPL-2.0-or-later",
|
||||
"engines": {
|
||||
"pnpm": ">=8.0.0"
|
||||
"pnpm": "10.13.1"
|
||||
},
|
||||
"scripts": {
|
||||
"// Development": "",
|
||||
"start": "node dist/main.js",
|
||||
"dev": "vite",
|
||||
"command": "pnpm run build && clear && ./dist/cli.js",
|
||||
"dev:debug": "NODE_OPTIONS='--inspect-brk=9229 --enable-source-maps' vite",
|
||||
"command": "COMMAND_TESTER=true pnpm run build > /dev/null 2>&1 && NODE_ENV=development ./dist/cli.js",
|
||||
"command:raw": "./dist/cli.js",
|
||||
"// Build and Deploy": "",
|
||||
"build": "vite build --mode=production",
|
||||
"postbuild": "chmod +x dist/main.js && chmod +x dist/cli.js && node scripts/copy-plugins.js",
|
||||
"build:watch": "nodemon --watch src --ext ts,js,json --exec 'tsx ./scripts/build.ts'",
|
||||
"postbuild": "chmod +x dist/main.js && chmod +x dist/cli.js",
|
||||
"build:watch": "WATCH_MODE=true nodemon --watch src --ext ts,js,json --exec 'tsx ./scripts/build.ts'",
|
||||
"build:docker": "./scripts/dc.sh run --rm builder",
|
||||
"build:release": "tsx ./scripts/build.ts",
|
||||
"preunraid:deploy": "pnpm build",
|
||||
"unraid:deploy": "./scripts/deploy-dev.sh",
|
||||
"// GraphQL Codegen": "",
|
||||
"codegen": "MOTHERSHIP_GRAPHQL_LINK='https://staging.mothership.unraid.net/ws' graphql-codegen --config codegen.ts -r dotenv/config './.env.staging'",
|
||||
"codegen:watch": "DOTENV_CONFIG_PATH='./.env.staging' graphql-codegen --config codegen.ts --watch -r dotenv/config",
|
||||
"codegen:local": "NODE_TLS_REJECT_UNAUTHORIZED=0 MOTHERSHIP_GRAPHQL_LINK='https://mothership.localhost/ws' graphql-codegen --config codegen.ts --watch",
|
||||
"codegen": "graphql-codegen --config codegen.ts",
|
||||
"codegen:watch": "graphql-codegen --config codegen.ts --watch",
|
||||
"// Code Quality": "",
|
||||
"lint": "eslint --config .eslintrc.ts src/",
|
||||
"lint:fix": "eslint --fix --config .eslintrc.ts src/",
|
||||
@@ -51,96 +51,98 @@
|
||||
"unraid-api": "dist/cli.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@apollo/client": "^3.11.8",
|
||||
"@apollo/server": "^4.11.2",
|
||||
"@as-integrations/fastify": "^2.1.1",
|
||||
"@fastify/cookie": "^11.0.2",
|
||||
"@fastify/helmet": "^13.0.1",
|
||||
"@graphql-codegen/client-preset": "^4.5.0",
|
||||
"@graphql-tools/load-files": "^7.0.0",
|
||||
"@graphql-tools/merge": "^9.0.8",
|
||||
"@graphql-tools/schema": "^10.0.7",
|
||||
"@graphql-tools/utils": "^10.5.5",
|
||||
"@jsonforms/core": "^3.5.1",
|
||||
"@nestjs/apollo": "^13.0.3",
|
||||
"@nestjs/cache-manager": "^3.0.1",
|
||||
"@nestjs/common": "^11.0.11",
|
||||
"@nestjs/config": "^4.0.2",
|
||||
"@nestjs/core": "^11.0.11",
|
||||
"@nestjs/graphql": "^13.0.3",
|
||||
"@nestjs/passport": "^11.0.0",
|
||||
"@nestjs/platform-fastify": "^11.0.11",
|
||||
"@nestjs/schedule": "^5.0.0",
|
||||
"@nestjs/throttler": "^6.2.1",
|
||||
"@reduxjs/toolkit": "^2.3.0",
|
||||
"@runonflux/nat-upnp": "^1.0.2",
|
||||
"@types/diff": "^7.0.1",
|
||||
"@unraid/libvirt": "^2.1.0",
|
||||
"accesscontrol": "^2.2.1",
|
||||
"bycontract": "^2.0.11",
|
||||
"bytes": "^3.1.2",
|
||||
"cache-manager": "^6.4.2",
|
||||
"cacheable-lookup": "^7.0.0",
|
||||
"camelcase-keys": "^9.1.3",
|
||||
"casbin": "^5.32.0",
|
||||
"change-case": "^5.4.4",
|
||||
"chokidar": "^4.0.1",
|
||||
"class-transformer": "^0.5.1",
|
||||
"class-validator": "^0.14.1",
|
||||
"cli-table": "^0.3.11",
|
||||
"command-exists": "^1.2.9",
|
||||
"convert": "^5.8.0",
|
||||
"cookie": "^1.0.2",
|
||||
"cron": "3.5.0",
|
||||
"cross-fetch": "^4.0.0",
|
||||
"diff": "^7.0.0",
|
||||
"dockerode": "^4.0.5",
|
||||
"dotenv": "^16.4.5",
|
||||
"execa": "^9.5.1",
|
||||
"exit-hook": "^4.0.0",
|
||||
"fastify": "^5.2.1",
|
||||
"filenamify": "^6.0.0",
|
||||
"fs-extra": "^11.2.0",
|
||||
"glob": "^11.0.1",
|
||||
"global-agent": "^3.0.0",
|
||||
"got": "^14.4.6",
|
||||
"graphql": "^16.9.0",
|
||||
"graphql-fields": "^2.0.3",
|
||||
"graphql-scalars": "^1.23.0",
|
||||
"graphql-subscriptions": "^3.0.0",
|
||||
"graphql-tag": "^2.12.6",
|
||||
"graphql-type-json": "^0.3.2",
|
||||
"graphql-type-uuid": "^0.2.0",
|
||||
"graphql-ws": "^6.0.0",
|
||||
"ini": "^5.0.0",
|
||||
"ip": "^2.0.1",
|
||||
"jose": "^6.0.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
"multi-ini": "^2.3.2",
|
||||
"mustache": "^4.2.0",
|
||||
"nest-authz": "^2.14.0",
|
||||
"nest-commander": "^3.15.0",
|
||||
"nestjs-pino": "^4.1.0",
|
||||
"node-cache": "^5.1.2",
|
||||
"node-window-polyfill": "^1.0.2",
|
||||
"p-retry": "^6.2.0",
|
||||
"passport-custom": "^1.1.1",
|
||||
"passport-http-header-strategy": "^1.1.0",
|
||||
"path-type": "^6.0.0",
|
||||
"pino": "^9.5.0",
|
||||
"pino-http": "^10.3.0",
|
||||
"pino-pretty": "^13.0.0",
|
||||
"pm2": "^6.0.0",
|
||||
"@apollo/client": "3.13.8",
|
||||
"@apollo/server": "4.12.2",
|
||||
"@as-integrations/fastify": "2.1.1",
|
||||
"@fastify/cookie": "11.0.2",
|
||||
"@fastify/helmet": "13.0.1",
|
||||
"@graphql-codegen/client-preset": "4.8.3",
|
||||
"@graphql-tools/load-files": "7.0.1",
|
||||
"@graphql-tools/merge": "9.1.1",
|
||||
"@graphql-tools/schema": "10.0.25",
|
||||
"@graphql-tools/utils": "10.9.1",
|
||||
"@jsonforms/core": "3.6.0",
|
||||
"@nestjs/apollo": "13.1.0",
|
||||
"@nestjs/cache-manager": "3.0.1",
|
||||
"@nestjs/common": "11.1.5",
|
||||
"@nestjs/config": "4.0.2",
|
||||
"@nestjs/core": "11.1.5",
|
||||
"@nestjs/event-emitter": "3.0.1",
|
||||
"@nestjs/graphql": "13.1.0",
|
||||
"@nestjs/passport": "11.0.5",
|
||||
"@nestjs/platform-fastify": "11.1.5",
|
||||
"@nestjs/schedule": "6.0.0",
|
||||
"@nestjs/throttler": "6.4.0",
|
||||
"@reduxjs/toolkit": "2.8.2",
|
||||
"@runonflux/nat-upnp": "1.0.2",
|
||||
"@types/diff": "8.0.0",
|
||||
"@unraid/libvirt": "2.1.0",
|
||||
"@unraid/shared": "workspace:*",
|
||||
"accesscontrol": "2.2.1",
|
||||
"atomically": "2.0.3",
|
||||
"bycontract": "2.0.11",
|
||||
"bytes": "3.1.2",
|
||||
"cache-manager": "7.0.1",
|
||||
"cacheable-lookup": "7.0.0",
|
||||
"camelcase-keys": "9.1.3",
|
||||
"casbin": "5.38.0",
|
||||
"change-case": "5.4.4",
|
||||
"chokidar": "4.0.3",
|
||||
"class-transformer": "0.5.1",
|
||||
"class-validator": "0.14.2",
|
||||
"cli-table": "0.3.11",
|
||||
"command-exists": "1.2.9",
|
||||
"convert": "5.12.0",
|
||||
"cookie": "1.0.2",
|
||||
"cron": "4.3.2",
|
||||
"cross-fetch": "4.1.0",
|
||||
"diff": "8.0.2",
|
||||
"dockerode": "4.0.7",
|
||||
"dotenv": "17.2.1",
|
||||
"execa": "9.6.0",
|
||||
"exit-hook": "4.0.0",
|
||||
"fastify": "5.4.0",
|
||||
"filenamify": "6.0.0",
|
||||
"fs-extra": "11.3.0",
|
||||
"glob": "11.0.3",
|
||||
"global-agent": "3.0.0",
|
||||
"got": "14.4.7",
|
||||
"graphql": "16.11.0",
|
||||
"graphql-fields": "2.0.3",
|
||||
"graphql-scalars": "1.24.2",
|
||||
"graphql-subscriptions": "3.0.0",
|
||||
"graphql-tag": "2.12.6",
|
||||
"graphql-ws": "6.0.6",
|
||||
"ini": "5.0.0",
|
||||
"ip": "2.0.1",
|
||||
"jose": "6.0.12",
|
||||
"json-bigint-patch": "0.0.8",
|
||||
"lodash-es": "4.17.21",
|
||||
"multi-ini": "2.3.2",
|
||||
"mustache": "4.2.0",
|
||||
"nest-authz": "2.17.0",
|
||||
"nest-commander": "3.18.0",
|
||||
"nestjs-pino": "4.4.0",
|
||||
"node-cache": "5.1.2",
|
||||
"node-window-polyfill": "1.0.4",
|
||||
"p-retry": "6.2.1",
|
||||
"passport-custom": "1.1.1",
|
||||
"passport-http-header-strategy": "1.1.0",
|
||||
"path-type": "6.0.0",
|
||||
"pino": "9.7.0",
|
||||
"pino-http": "10.5.0",
|
||||
"pino-pretty": "13.0.0",
|
||||
"pm2": "6.0.8",
|
||||
"reflect-metadata": "^0.1.14",
|
||||
"request": "^2.88.2",
|
||||
"rxjs": "^7.8.2",
|
||||
"semver": "^7.6.3",
|
||||
"strftime": "^0.10.3",
|
||||
"systeminformation": "^5.25.11",
|
||||
"uuid": "^11.0.2",
|
||||
"ws": "^8.18.0",
|
||||
"zen-observable-ts": "^1.1.0",
|
||||
"zod": "^3.23.8"
|
||||
"request": "2.88.2",
|
||||
"rxjs": "7.8.2",
|
||||
"semver": "7.7.2",
|
||||
"strftime": "0.10.3",
|
||||
"systeminformation": "5.27.7",
|
||||
"uuid": "11.1.0",
|
||||
"ws": "8.18.3",
|
||||
"zen-observable-ts": "1.1.0",
|
||||
"zod": "3.25.76"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"unraid-api-plugin-connect": "workspace:*"
|
||||
@@ -151,71 +153,73 @@
|
||||
}
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.21.0",
|
||||
"@graphql-codegen/add": "^5.0.3",
|
||||
"@graphql-codegen/cli": "^5.0.3",
|
||||
"@graphql-codegen/fragment-matcher": "^5.0.2",
|
||||
"@graphql-codegen/import-types-preset": "^3.0.0",
|
||||
"@graphql-codegen/typed-document-node": "^5.0.11",
|
||||
"@graphql-codegen/typescript": "^4.1.1",
|
||||
"@graphql-codegen/typescript-operations": "^4.3.1",
|
||||
"@graphql-codegen/typescript-resolvers": "4.5.0",
|
||||
"@graphql-typed-document-node/core": "^3.2.0",
|
||||
"@ianvs/prettier-plugin-sort-imports": "^4.4.0",
|
||||
"@nestjs/testing": "^11.0.11",
|
||||
"@originjs/vite-plugin-commonjs": "^1.0.3",
|
||||
"@rollup/plugin-node-resolve": "^16.0.0",
|
||||
"@swc/core": "^1.10.1",
|
||||
"@types/async-exit-hook": "^2.0.2",
|
||||
"@types/bytes": "^3.1.4",
|
||||
"@types/cli-table": "^0.3.4",
|
||||
"@types/command-exists": "^1.2.3",
|
||||
"@types/cors": "^2.8.17",
|
||||
"@types/dockerode": "^3.3.31",
|
||||
"@types/graphql-fields": "^1.3.9",
|
||||
"@types/graphql-type-uuid": "^0.2.6",
|
||||
"@types/ini": "^4.1.1",
|
||||
"@types/ip": "^1.1.3",
|
||||
"@types/lodash": "^4.17.13",
|
||||
"@types/lodash-es": "^4.17.12",
|
||||
"@types/mustache": "^4.2.5",
|
||||
"@types/node": "^22.13.4",
|
||||
"@types/pify": "^6.0.0",
|
||||
"@types/semver": "^7.5.8",
|
||||
"@types/sendmail": "^1.4.7",
|
||||
"@types/stoppable": "^1.1.3",
|
||||
"@types/strftime": "^0.9.8",
|
||||
"@types/uuid": "^10.0.0",
|
||||
"@types/ws": "^8.5.13",
|
||||
"@types/wtfnode": "^0.7.3",
|
||||
"@vitest/coverage-v8": "^3.0.5",
|
||||
"@vitest/ui": "^3.0.5",
|
||||
"@eslint/js": "9.32.0",
|
||||
"@graphql-codegen/add": "5.0.3",
|
||||
"@graphql-codegen/cli": "5.0.7",
|
||||
"@graphql-codegen/fragment-matcher": "5.1.0",
|
||||
"@graphql-codegen/import-types-preset": "3.0.1",
|
||||
"@graphql-codegen/typed-document-node": "5.1.2",
|
||||
"@graphql-codegen/typescript": "4.1.6",
|
||||
"@graphql-codegen/typescript-operations": "4.6.1",
|
||||
"@graphql-codegen/typescript-resolvers": "4.5.1",
|
||||
"@graphql-typed-document-node/core": "3.2.0",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.5.1",
|
||||
"@nestjs/testing": "11.1.5",
|
||||
"@originjs/vite-plugin-commonjs": "1.0.3",
|
||||
"@rollup/plugin-node-resolve": "16.0.1",
|
||||
"@swc/core": "1.13.2",
|
||||
"@types/async-exit-hook": "2.0.2",
|
||||
"@types/bytes": "3.1.5",
|
||||
"@types/cli-table": "0.3.4",
|
||||
"@types/command-exists": "1.2.3",
|
||||
"@types/cors": "2.8.19",
|
||||
"@types/dockerode": "3.3.42",
|
||||
"@types/graphql-fields": "1.3.9",
|
||||
"@types/graphql-type-uuid": "0.2.6",
|
||||
"@types/ini": "4.1.1",
|
||||
"@types/ip": "1.1.3",
|
||||
"@types/lodash": "4.17.20",
|
||||
"@types/lodash-es": "4.17.12",
|
||||
"@types/mustache": "4.2.6",
|
||||
"@types/node": "22.16.5",
|
||||
"@types/pify": "6.1.0",
|
||||
"@types/semver": "7.7.0",
|
||||
"@types/sendmail": "1.4.7",
|
||||
"@types/stoppable": "1.1.3",
|
||||
"@types/strftime": "0.9.8",
|
||||
"@types/supertest": "6.0.3",
|
||||
"@types/uuid": "10.0.0",
|
||||
"@types/ws": "8.18.1",
|
||||
"@types/wtfnode": "0.7.3",
|
||||
"@vitest/coverage-v8": "3.2.4",
|
||||
"@vitest/ui": "3.2.4",
|
||||
"commit-and-tag-version": "9.6.0",
|
||||
"cz-conventional-changelog": "3.3.0",
|
||||
"eslint": "^9.20.1",
|
||||
"eslint-plugin-import": "^2.31.0",
|
||||
"eslint-plugin-n": "^17.0.0",
|
||||
"eslint-plugin-no-relative-import-paths": "^1.6.1",
|
||||
"eslint-plugin-prettier": "^5.2.3",
|
||||
"graphql-codegen-typescript-validation-schema": "^0.17.0",
|
||||
"jiti": "^2.4.0",
|
||||
"nodemon": "^3.1.7",
|
||||
"prettier": "^3.5.2",
|
||||
"rollup-plugin-node-externals": "^8.0.0",
|
||||
"standard-version": "^9.5.0",
|
||||
"tsx": "^4.19.2",
|
||||
"type-fest": "^4.37.0",
|
||||
"typescript": "^5.6.3",
|
||||
"typescript-eslint": "^8.13.0",
|
||||
"unplugin-swc": "^1.5.1",
|
||||
"vite": "^6.0.0",
|
||||
"vite-plugin-node": "^5.0.0",
|
||||
"vite-tsconfig-paths": "^5.1.0",
|
||||
"vitest": "^3.0.5",
|
||||
"zx": "^8.3.2"
|
||||
"eslint": "9.32.0",
|
||||
"eslint-plugin-import": "2.32.0",
|
||||
"eslint-plugin-n": "17.21.2",
|
||||
"eslint-plugin-no-relative-import-paths": "1.6.1",
|
||||
"eslint-plugin-prettier": "5.5.3",
|
||||
"graphql-codegen-typescript-validation-schema": "0.17.1",
|
||||
"jiti": "2.5.1",
|
||||
"nodemon": "3.1.10",
|
||||
"prettier": "3.6.2",
|
||||
"rollup-plugin-node-externals": "8.0.1",
|
||||
"supertest": "7.1.4",
|
||||
"tsx": "4.20.3",
|
||||
"type-fest": "4.41.0",
|
||||
"typescript": "5.8.3",
|
||||
"typescript-eslint": "8.38.0",
|
||||
"unplugin-swc": "1.5.5",
|
||||
"vite": "7.0.6",
|
||||
"vite-plugin-node": "7.0.0",
|
||||
"vite-tsconfig-paths": "5.1.4",
|
||||
"vitest": "3.2.4",
|
||||
"zx": "8.7.1"
|
||||
},
|
||||
"overrides": {
|
||||
"eslint": {
|
||||
"jiti": "2"
|
||||
"jiti": "2.5.1"
|
||||
},
|
||||
"@as-integrations/fastify": {
|
||||
"fastify": "$fastify"
|
||||
@@ -226,5 +230,5 @@
|
||||
}
|
||||
},
|
||||
"private": true,
|
||||
"packageManager": "pnpm@10.8.1"
|
||||
"packageManager": "pnpm@10.13.1"
|
||||
}
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
#!/usr/bin/env zx
|
||||
import { mkdir, readFile, writeFile } from 'fs/promises';
|
||||
import { existsSync } from 'node:fs';
|
||||
import { basename, join, resolve } from 'node:path';
|
||||
import { exit } from 'process';
|
||||
|
||||
import type { PackageJson } from 'type-fest';
|
||||
@@ -10,8 +12,48 @@ import { getDeploymentVersion } from './get-deployment-version.js';
|
||||
type ApiPackageJson = PackageJson & {
|
||||
version: string;
|
||||
peerDependencies: Record<string, string>;
|
||||
dependencies?: Record<string, string>;
|
||||
};
|
||||
|
||||
/**
|
||||
* Map of workspace packages to vendor into production builds.
|
||||
* Key: package name, Value: path from monorepo root to the package directory
|
||||
*/
|
||||
const WORKSPACE_PACKAGES_TO_VENDOR = {
|
||||
'@unraid/shared': 'packages/unraid-shared',
|
||||
'unraid-api-plugin-connect': 'packages/unraid-api-plugin-connect',
|
||||
} as const;
|
||||
|
||||
/**
|
||||
* Packs a workspace package and installs it as a tarball dependency.
|
||||
*/
|
||||
const packAndInstallWorkspacePackage = async (pkgName: string, pkgPath: string, tempDir: string) => {
|
||||
const [fullPkgPath, fullTempDir] = [resolve(pkgPath), resolve(tempDir)];
|
||||
if (!existsSync(fullPkgPath)) {
|
||||
console.warn(`Workspace package ${pkgName} not found at ${fullPkgPath}. Skipping.`);
|
||||
return;
|
||||
}
|
||||
console.log(`Building and packing workspace package ${pkgName}...`);
|
||||
// Pack the package to a tarball
|
||||
const packedResult = await $`pnpm --filter ${pkgName} pack --pack-destination ${fullTempDir}`;
|
||||
const tarballPath = packedResult.lines().at(-1)!;
|
||||
const tarballName = basename(tarballPath);
|
||||
|
||||
// Install the tarball
|
||||
const tarballPattern = join(fullTempDir, tarballName);
|
||||
await $`npm install ${tarballPattern}`;
|
||||
};
|
||||
|
||||
/**------------------------------------------------------------------------
|
||||
* Build Script
|
||||
*
|
||||
* Builds & vendors the API for deployment to an Unraid server.
|
||||
*
|
||||
* Places artifacts in the `deploy/` folder:
|
||||
* - release/ contains source code & assets
|
||||
* - node-modules-archive/ contains tarball of node_modules
|
||||
*------------------------------------------------------------------------**/
|
||||
|
||||
try {
|
||||
// Create release and pack directories
|
||||
await mkdir('./deploy/release', { recursive: true });
|
||||
@@ -30,6 +72,20 @@ try {
|
||||
|
||||
// Update the package.json version to the deployment version
|
||||
parsedPackageJson.version = deploymentVersion;
|
||||
|
||||
/**---------------------------------------------
|
||||
* Handle workspace runtime dependencies
|
||||
*--------------------------------------------*/
|
||||
const workspaceDeps = Object.keys(WORKSPACE_PACKAGES_TO_VENDOR);
|
||||
if (workspaceDeps.length > 0) {
|
||||
console.log(`Stripping workspace deps from package.json: ${workspaceDeps.join(', ')}`);
|
||||
workspaceDeps.forEach((dep) => {
|
||||
if (parsedPackageJson.dependencies?.[dep]) {
|
||||
delete parsedPackageJson.dependencies[dep];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// omit dev dependencies from vendored dependencies in release build
|
||||
parsedPackageJson.devDependencies = {};
|
||||
|
||||
@@ -49,13 +105,26 @@ try {
|
||||
|
||||
await writeFile('package.json', JSON.stringify(parsedPackageJson, null, 4));
|
||||
|
||||
const sudoCheck = await $`command -v sudo`.nothrow();
|
||||
const SUDO = sudoCheck.exitCode === 0 ? 'sudo' : '';
|
||||
await $`${SUDO} chown -R 0:0 node_modules`;
|
||||
/** After npm install, vendor workspace packages via pack/install */
|
||||
if (workspaceDeps.length > 0) {
|
||||
console.log('Vendoring workspace packages...');
|
||||
const tempDir = './packages';
|
||||
await mkdir(tempDir, { recursive: true });
|
||||
|
||||
await $`XZ_OPT=-5 tar -cJf packed-node-modules.tar.xz node_modules`;
|
||||
await $`mv packed-node-modules.tar.xz ../`;
|
||||
await $`${SUDO} rm -rf node_modules`;
|
||||
for (const dep of workspaceDeps) {
|
||||
const pkgPath =
|
||||
WORKSPACE_PACKAGES_TO_VENDOR[dep as keyof typeof WORKSPACE_PACKAGES_TO_VENDOR];
|
||||
// The extra '../../../' prefix adjusts for the fact that we're in the pack directory.
|
||||
// this way, pkgPath can be defined relative to the monorepo root.
|
||||
await packAndInstallWorkspacePackage(dep, join('../../../', pkgPath), tempDir);
|
||||
}
|
||||
}
|
||||
|
||||
// Clean the release directory
|
||||
await $`rm -rf ../release/*`;
|
||||
|
||||
// Copy other files to release directory
|
||||
await $`cp -r ./* ../release/`;
|
||||
|
||||
// chmod the cli
|
||||
await $`chmod +x ./dist/cli.js`;
|
||||
|
||||
@@ -1,59 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* This AI-generated script copies workspace plugin dist folders to the dist/plugins directory
|
||||
* to ensure they're available for dynamic imports in production.
|
||||
*/
|
||||
import { execSync } from 'child_process';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
// Get the package.json to find workspace dependencies
|
||||
const packageJsonPath = path.resolve(__dirname, '../package.json');
|
||||
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
|
||||
|
||||
// Create the plugins directory if it doesn't exist
|
||||
const pluginsDir = path.resolve(__dirname, '../dist/plugins');
|
||||
if (!fs.existsSync(pluginsDir)) {
|
||||
fs.mkdirSync(pluginsDir, { recursive: true });
|
||||
}
|
||||
|
||||
// Find all workspace plugins
|
||||
const pluginPrefix = 'unraid-api-plugin-';
|
||||
const workspacePlugins = Object.keys(packageJson.peerDependencies || {}).filter((pkgName) =>
|
||||
pkgName.startsWith(pluginPrefix)
|
||||
);
|
||||
|
||||
// Copy each plugin's dist folder to the plugins directory
|
||||
for (const pkgName of workspacePlugins) {
|
||||
const pluginPath = path.resolve(__dirname, `../../packages/${pkgName}`);
|
||||
const pluginDistPath = path.resolve(pluginPath, 'dist');
|
||||
const targetPath = path.resolve(pluginsDir, pkgName);
|
||||
|
||||
console.log(`Building ${pkgName}...`);
|
||||
try {
|
||||
execSync('pnpm build', {
|
||||
cwd: pluginPath,
|
||||
stdio: 'inherit',
|
||||
});
|
||||
console.log(`Successfully built ${pkgName}`);
|
||||
} catch (error) {
|
||||
console.error(`Failed to build ${pkgName}:`, error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (!fs.existsSync(pluginDistPath)) {
|
||||
console.warn(`Plugin ${pkgName} dist folder not found at ${pluginDistPath}`);
|
||||
process.exit(1);
|
||||
}
|
||||
console.log(`Copying ${pkgName} dist folder to ${targetPath}`);
|
||||
fs.mkdirSync(targetPath, { recursive: true });
|
||||
fs.cpSync(pluginDistPath, targetPath, { recursive: true });
|
||||
console.log(`Successfully copied ${pkgName} dist folder`);
|
||||
}
|
||||
|
||||
console.log('Plugin dist folders copied successfully');
|
||||
@@ -1,45 +0,0 @@
|
||||
import { getAllowedOrigins } from '@app/common/allowed-origins.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { loadConfigFile } from '@app/store/modules/config.js';
|
||||
import { loadStateFiles } from '@app/store/modules/emhttp.js';
|
||||
|
||||
import 'reflect-metadata';
|
||||
|
||||
import { expect, test } from 'vitest';
|
||||
|
||||
test('Returns allowed origins', async () => {
|
||||
// Load state files into store
|
||||
await store.dispatch(loadStateFiles()).unwrap();
|
||||
await store.dispatch(loadConfigFile()).unwrap();
|
||||
|
||||
// Get allowed origins
|
||||
const allowedOrigins = getAllowedOrigins();
|
||||
|
||||
// Test that the result is an array
|
||||
expect(Array.isArray(allowedOrigins)).toBe(true);
|
||||
|
||||
// Test that it contains the expected socket paths
|
||||
expect(allowedOrigins).toContain('/var/run/unraid-notifications.sock');
|
||||
expect(allowedOrigins).toContain('/var/run/unraid-php.sock');
|
||||
expect(allowedOrigins).toContain('/var/run/unraid-cli.sock');
|
||||
|
||||
// Test that it contains the expected local URLs
|
||||
expect(allowedOrigins).toContain('http://localhost:8080');
|
||||
expect(allowedOrigins).toContain('https://localhost:4443');
|
||||
|
||||
// Test that it contains the expected connect URLs
|
||||
expect(allowedOrigins).toContain('https://connect.myunraid.net');
|
||||
expect(allowedOrigins).toContain('https://connect-staging.myunraid.net');
|
||||
expect(allowedOrigins).toContain('https://dev-my.myunraid.net:4000');
|
||||
|
||||
// Test that it contains the extra origins from config
|
||||
expect(allowedOrigins).toContain('https://google.com');
|
||||
expect(allowedOrigins).toContain('https://test.com');
|
||||
|
||||
// Test that it contains some of the remote URLs
|
||||
expect(allowedOrigins).toContain('https://tower.local:4443');
|
||||
expect(allowedOrigins).toContain('https://192.168.1.150:4443');
|
||||
|
||||
// Test that there are no duplicates
|
||||
expect(allowedOrigins.length).toBe(new Set(allowedOrigins).size);
|
||||
});
|
||||
@@ -1,158 +0,0 @@
|
||||
import 'reflect-metadata';
|
||||
|
||||
import { cloneDeep } from 'lodash-es';
|
||||
import { expect, test } from 'vitest';
|
||||
|
||||
import { getWriteableConfig } from '@app/core/utils/files/config-file-normalizer.js';
|
||||
import { initialState } from '@app/store/modules/config.js';
|
||||
|
||||
test('it creates a FLASH config with NO OPTIONAL values', () => {
|
||||
const basicConfig = initialState;
|
||||
const config = getWriteableConfig(basicConfig, 'flash');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
{
|
||||
"api": {
|
||||
"extraOrigins": "",
|
||||
"version": "",
|
||||
},
|
||||
"local": {
|
||||
"sandbox": "no",
|
||||
},
|
||||
"remote": {
|
||||
"accesstoken": "",
|
||||
"apikey": "",
|
||||
"avatar": "",
|
||||
"dynamicRemoteAccessType": "DISABLED",
|
||||
"email": "",
|
||||
"idtoken": "",
|
||||
"localApiKey": "",
|
||||
"refreshtoken": "",
|
||||
"regWizTime": "",
|
||||
"ssoSubIds": "",
|
||||
"upnpEnabled": "",
|
||||
"username": "",
|
||||
"wanaccess": "",
|
||||
"wanport": "",
|
||||
},
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('it creates a MEMORY config with NO OPTIONAL values', () => {
|
||||
const basicConfig = initialState;
|
||||
const config = getWriteableConfig(basicConfig, 'memory');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
{
|
||||
"api": {
|
||||
"extraOrigins": "",
|
||||
"version": "",
|
||||
},
|
||||
"connectionStatus": {
|
||||
"minigraph": "PRE_INIT",
|
||||
"upnpStatus": "",
|
||||
},
|
||||
"local": {
|
||||
"sandbox": "no",
|
||||
},
|
||||
"remote": {
|
||||
"accesstoken": "",
|
||||
"allowedOrigins": "/var/run/unraid-notifications.sock, /var/run/unraid-php.sock, /var/run/unraid-cli.sock, https://connect.myunraid.net, https://connect-staging.myunraid.net, https://dev-my.myunraid.net:4000",
|
||||
"apikey": "",
|
||||
"avatar": "",
|
||||
"dynamicRemoteAccessType": "DISABLED",
|
||||
"email": "",
|
||||
"idtoken": "",
|
||||
"localApiKey": "",
|
||||
"refreshtoken": "",
|
||||
"regWizTime": "",
|
||||
"ssoSubIds": "",
|
||||
"upnpEnabled": "",
|
||||
"username": "",
|
||||
"wanaccess": "",
|
||||
"wanport": "",
|
||||
},
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('it creates a FLASH config with OPTIONAL values', () => {
|
||||
const basicConfig = cloneDeep(initialState);
|
||||
// 2fa & t2fa should be ignored
|
||||
basicConfig.remote['2Fa'] = 'yes';
|
||||
basicConfig.local['2Fa'] = 'yes';
|
||||
|
||||
basicConfig.api.extraOrigins = 'myextra.origins';
|
||||
basicConfig.remote.upnpEnabled = 'yes';
|
||||
basicConfig.connectionStatus.upnpStatus = 'Turned On';
|
||||
const config = getWriteableConfig(basicConfig, 'flash');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
{
|
||||
"api": {
|
||||
"extraOrigins": "myextra.origins",
|
||||
"version": "",
|
||||
},
|
||||
"local": {
|
||||
"sandbox": "no",
|
||||
},
|
||||
"remote": {
|
||||
"accesstoken": "",
|
||||
"apikey": "",
|
||||
"avatar": "",
|
||||
"dynamicRemoteAccessType": "DISABLED",
|
||||
"email": "",
|
||||
"idtoken": "",
|
||||
"localApiKey": "",
|
||||
"refreshtoken": "",
|
||||
"regWizTime": "",
|
||||
"ssoSubIds": "",
|
||||
"upnpEnabled": "yes",
|
||||
"username": "",
|
||||
"wanaccess": "",
|
||||
"wanport": "",
|
||||
},
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('it creates a MEMORY config with OPTIONAL values', () => {
|
||||
const basicConfig = cloneDeep(initialState);
|
||||
// 2fa & t2fa should be ignored
|
||||
basicConfig.remote['2Fa'] = 'yes';
|
||||
basicConfig.local['2Fa'] = 'yes';
|
||||
basicConfig.api.extraOrigins = 'myextra.origins';
|
||||
basicConfig.remote.upnpEnabled = 'yes';
|
||||
basicConfig.connectionStatus.upnpStatus = 'Turned On';
|
||||
const config = getWriteableConfig(basicConfig, 'memory');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
{
|
||||
"api": {
|
||||
"extraOrigins": "myextra.origins",
|
||||
"version": "",
|
||||
},
|
||||
"connectionStatus": {
|
||||
"minigraph": "PRE_INIT",
|
||||
"upnpStatus": "Turned On",
|
||||
},
|
||||
"local": {
|
||||
"sandbox": "no",
|
||||
},
|
||||
"remote": {
|
||||
"accesstoken": "",
|
||||
"allowedOrigins": "/var/run/unraid-notifications.sock, /var/run/unraid-php.sock, /var/run/unraid-cli.sock, https://connect.myunraid.net, https://connect-staging.myunraid.net, https://dev-my.myunraid.net:4000",
|
||||
"apikey": "",
|
||||
"avatar": "",
|
||||
"dynamicRemoteAccessType": "DISABLED",
|
||||
"email": "",
|
||||
"idtoken": "",
|
||||
"localApiKey": "",
|
||||
"refreshtoken": "",
|
||||
"regWizTime": "",
|
||||
"ssoSubIds": "",
|
||||
"upnpEnabled": "yes",
|
||||
"username": "",
|
||||
"wanaccess": "",
|
||||
"wanport": "",
|
||||
},
|
||||
}
|
||||
`);
|
||||
});
|
||||
5
api/src/__test__/core/utils/pm2/dummy-process.js
Normal file
5
api/src/__test__/core/utils/pm2/dummy-process.js
Normal file
@@ -0,0 +1,5 @@
|
||||
/* eslint-disable no-undef */
|
||||
// Dummy process for PM2 testing
|
||||
setInterval(() => {
|
||||
// Keep process alive
|
||||
}, 1000);
|
||||
@@ -0,0 +1,216 @@
|
||||
import { existsSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
import { execa } from 'execa';
|
||||
import pm2 from 'pm2';
|
||||
import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { isUnraidApiRunning } from '@app/core/utils/pm2/unraid-api-running.js';
|
||||
|
||||
const __dirname = fileURLToPath(new URL('.', import.meta.url));
|
||||
const PROJECT_ROOT = join(__dirname, '../../../../..');
|
||||
const DUMMY_PROCESS_PATH = join(__dirname, 'dummy-process.js');
|
||||
const CLI_PATH = join(PROJECT_ROOT, 'dist/cli.js');
|
||||
const TEST_PROCESS_NAME = 'test-unraid-api';
|
||||
|
||||
// Shared PM2 connection state
|
||||
let pm2Connected = false;
|
||||
|
||||
// Helper function to run CLI command (assumes CLI is built)
|
||||
async function runCliCommand(command: string, options: any = {}) {
|
||||
return await execa('node', [CLI_PATH, command], options);
|
||||
}
|
||||
|
||||
// Helper to ensure PM2 connection is established
|
||||
async function ensurePM2Connection() {
|
||||
if (pm2Connected) return;
|
||||
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
pm2.connect((err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
pm2Connected = true;
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Helper to delete specific test processes (lightweight, reuses connection)
|
||||
async function deleteTestProcesses() {
|
||||
if (!pm2Connected) {
|
||||
// No connection, nothing to clean up
|
||||
return;
|
||||
}
|
||||
|
||||
const deletePromise = new Promise<void>((resolve) => {
|
||||
// Delete specific processes we might have created
|
||||
const processNames = ['unraid-api', TEST_PROCESS_NAME];
|
||||
let deletedCount = 0;
|
||||
|
||||
const deleteNext = () => {
|
||||
if (deletedCount >= processNames.length) {
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
|
||||
const processName = processNames[deletedCount];
|
||||
pm2.delete(processName, (deleteErr) => {
|
||||
// Ignore errors, process might not exist
|
||||
deletedCount++;
|
||||
deleteNext();
|
||||
});
|
||||
};
|
||||
|
||||
deleteNext();
|
||||
});
|
||||
|
||||
const timeoutPromise = new Promise<void>((resolve) => {
|
||||
setTimeout(() => resolve(), 3000); // 3 second timeout
|
||||
});
|
||||
|
||||
return Promise.race([deletePromise, timeoutPromise]);
|
||||
}
|
||||
|
||||
// Helper to ensure PM2 is completely clean (heavy cleanup with daemon kill)
|
||||
async function cleanupAllPM2Processes() {
|
||||
// First delete test processes if we have a connection
|
||||
if (pm2Connected) {
|
||||
await deleteTestProcesses();
|
||||
}
|
||||
|
||||
return new Promise<void>((resolve) => {
|
||||
// Always connect fresh for daemon kill (in case we weren't connected)
|
||||
pm2.connect((err) => {
|
||||
if (err) {
|
||||
// If we can't connect, assume PM2 is not running
|
||||
pm2Connected = false;
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
|
||||
// Kill the daemon to ensure fresh state
|
||||
pm2.killDaemon((killErr) => {
|
||||
pm2.disconnect();
|
||||
pm2Connected = false;
|
||||
// Small delay to let PM2 fully shutdown
|
||||
setTimeout(resolve, 500);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
describe.skipIf(!!process.env.CI)('PM2 integration tests', () => {
|
||||
beforeAll(async () => {
|
||||
// Build the CLI if it doesn't exist (only for CLI tests)
|
||||
if (!existsSync(CLI_PATH)) {
|
||||
console.log('Building CLI for integration tests...');
|
||||
try {
|
||||
await execa('pnpm', ['build'], {
|
||||
cwd: PROJECT_ROOT,
|
||||
stdio: 'inherit',
|
||||
timeout: 120000, // 2 minute timeout for build
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Failed to build CLI:', error);
|
||||
throw new Error(
|
||||
'Cannot run CLI integration tests without built CLI. Run `pnpm build` first.'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Only do a full cleanup once at the beginning
|
||||
await cleanupAllPM2Processes();
|
||||
}, 150000); // 2.5 minute timeout for setup
|
||||
|
||||
afterAll(async () => {
|
||||
// Only do a full cleanup once at the end
|
||||
await cleanupAllPM2Processes();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
// Lightweight cleanup after each test - just delete our test processes
|
||||
await deleteTestProcesses();
|
||||
}, 5000); // 5 second timeout for cleanup
|
||||
|
||||
describe('isUnraidApiRunning function', () => {
|
||||
it('should return false when PM2 is not running the unraid-api process', async () => {
|
||||
const result = await isUnraidApiRunning();
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true when PM2 has unraid-api process running', async () => {
|
||||
// Ensure PM2 connection
|
||||
await ensurePM2Connection();
|
||||
|
||||
// Start a dummy process with the name 'unraid-api'
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
pm2.start(
|
||||
{
|
||||
script: DUMMY_PROCESS_PATH,
|
||||
name: 'unraid-api',
|
||||
},
|
||||
(startErr) => {
|
||||
if (startErr) return reject(startErr);
|
||||
resolve();
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
// Give PM2 time to start the process
|
||||
await new Promise((resolve) => setTimeout(resolve, 2000));
|
||||
|
||||
const result = await isUnraidApiRunning();
|
||||
expect(result).toBe(true);
|
||||
}, 30000);
|
||||
|
||||
it('should return false when unraid-api process is stopped', async () => {
|
||||
// Ensure PM2 connection
|
||||
await ensurePM2Connection();
|
||||
|
||||
// Start and then stop the process
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
pm2.start(
|
||||
{
|
||||
script: DUMMY_PROCESS_PATH,
|
||||
name: 'unraid-api',
|
||||
},
|
||||
(startErr) => {
|
||||
if (startErr) return reject(startErr);
|
||||
|
||||
// Stop the process after starting
|
||||
setTimeout(() => {
|
||||
pm2.stop('unraid-api', (stopErr) => {
|
||||
if (stopErr) return reject(stopErr);
|
||||
resolve();
|
||||
});
|
||||
}, 1000);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 1000));
|
||||
|
||||
const result = await isUnraidApiRunning();
|
||||
expect(result).toBe(false);
|
||||
}, 30000);
|
||||
|
||||
it('should handle PM2 connection errors gracefully', async () => {
|
||||
// Set an invalid PM2_HOME to force connection failure
|
||||
const originalPM2Home = process.env.PM2_HOME;
|
||||
process.env.PM2_HOME = '/invalid/path/that/does/not/exist';
|
||||
|
||||
const result = await isUnraidApiRunning();
|
||||
expect(result).toBe(false);
|
||||
|
||||
// Restore original PM2_HOME
|
||||
if (originalPM2Home) {
|
||||
process.env.PM2_HOME = originalPM2Home;
|
||||
} else {
|
||||
delete process.env.PM2_HOME;
|
||||
}
|
||||
}, 15000); // 15 second timeout to allow for the Promise.race timeout
|
||||
});
|
||||
});
|
||||
@@ -1,29 +0,0 @@
|
||||
import 'reflect-metadata';
|
||||
|
||||
import { readFileSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
|
||||
import { expect, test } from 'vitest';
|
||||
|
||||
import { checkMothershipAuthentication } from '@app/graphql/resolvers/query/cloud/check-mothership-authentication.js';
|
||||
|
||||
test('It fails to authenticate with mothership with no credentials', async () => {
|
||||
try {
|
||||
const packageJson = JSON.parse(readFileSync(join(process.cwd(), 'package.json'), 'utf-8'));
|
||||
await expect(
|
||||
checkMothershipAuthentication('BAD', 'BAD')
|
||||
).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`[Error: Failed to connect to https://mothership.unraid.net/ws with a "426" HTTP error.]`
|
||||
);
|
||||
expect(packageJson.version).not.toBeNull();
|
||||
await expect(
|
||||
checkMothershipAuthentication(packageJson.version, 'BAD_API_KEY')
|
||||
).rejects.toThrowErrorMatchingInlineSnapshot(`[Error: Invalid credentials]`);
|
||||
} catch (error) {
|
||||
if (error instanceof Error && error.message.includes('Timeout')) {
|
||||
// Test succeeds on timeout
|
||||
return;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
374
api/src/__test__/graphql/resolvers/rclone-api.service.test.ts
Normal file
374
api/src/__test__/graphql/resolvers/rclone-api.service.test.ts
Normal file
@@ -0,0 +1,374 @@
|
||||
import { HTTPError } from 'got';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { RCloneApiService } from '@app/unraid-api/graph/resolvers/rclone/rclone-api.service.js';
|
||||
import {
|
||||
CreateRCloneRemoteDto,
|
||||
DeleteRCloneRemoteDto,
|
||||
GetRCloneJobStatusDto,
|
||||
GetRCloneRemoteConfigDto,
|
||||
GetRCloneRemoteDetailsDto,
|
||||
RCloneStartBackupInput,
|
||||
UpdateRCloneRemoteDto,
|
||||
} from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
|
||||
|
||||
vi.mock('got');
|
||||
vi.mock('execa');
|
||||
vi.mock('p-retry');
|
||||
vi.mock('node:fs', () => ({
|
||||
existsSync: vi.fn(),
|
||||
}));
|
||||
vi.mock('node:fs/promises', () => ({
|
||||
mkdir: vi.fn(),
|
||||
rm: vi.fn(),
|
||||
writeFile: vi.fn(),
|
||||
}));
|
||||
vi.mock('@app/core/log.js', () => ({
|
||||
sanitizeParams: vi.fn((params) => params),
|
||||
}));
|
||||
vi.mock('@app/store/index.js', () => ({
|
||||
getters: {
|
||||
paths: () => ({
|
||||
'rclone-socket': '/tmp/rclone.sock',
|
||||
'log-base': '/var/log',
|
||||
}),
|
||||
},
|
||||
}));
|
||||
|
||||
// Mock NestJS Logger to suppress logs during tests
|
||||
vi.mock('@nestjs/common', async (importOriginal) => {
|
||||
const original = await importOriginal<typeof import('@nestjs/common')>();
|
||||
return {
|
||||
...original,
|
||||
Logger: vi.fn(() => ({
|
||||
log: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
})),
|
||||
};
|
||||
});
|
||||
|
||||
describe('RCloneApiService', () => {
|
||||
let service: RCloneApiService;
|
||||
let mockGot: any;
|
||||
let mockExeca: any;
|
||||
let mockPRetry: any;
|
||||
let mockExistsSync: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
const { default: got } = await import('got');
|
||||
const { execa } = await import('execa');
|
||||
const pRetry = await import('p-retry');
|
||||
const { existsSync } = await import('node:fs');
|
||||
|
||||
mockGot = vi.mocked(got);
|
||||
mockExeca = vi.mocked(execa);
|
||||
mockPRetry = vi.mocked(pRetry.default);
|
||||
mockExistsSync = vi.mocked(existsSync);
|
||||
|
||||
mockGot.post = vi.fn().mockResolvedValue({ body: {} });
|
||||
mockExeca.mockReturnValue({
|
||||
on: vi.fn(),
|
||||
kill: vi.fn(),
|
||||
killed: false,
|
||||
pid: 12345,
|
||||
} as any);
|
||||
mockPRetry.mockResolvedValue(undefined);
|
||||
mockExistsSync.mockReturnValue(false);
|
||||
|
||||
service = new RCloneApiService();
|
||||
await service.onModuleInit();
|
||||
});
|
||||
|
||||
describe('getProviders', () => {
|
||||
it('should return list of providers', async () => {
|
||||
const mockProviders = [
|
||||
{ name: 'aws', prefix: 's3', description: 'Amazon S3' },
|
||||
{ name: 'google', prefix: 'drive', description: 'Google Drive' },
|
||||
];
|
||||
mockGot.post.mockResolvedValue({
|
||||
body: { providers: mockProviders },
|
||||
});
|
||||
|
||||
const result = await service.getProviders();
|
||||
|
||||
expect(result).toEqual(mockProviders);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/providers',
|
||||
expect.objectContaining({
|
||||
json: {},
|
||||
responseType: 'json',
|
||||
enableUnixSockets: true,
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('should return empty array when no providers', async () => {
|
||||
mockGot.post.mockResolvedValue({ body: {} });
|
||||
|
||||
const result = await service.getProviders();
|
||||
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('listRemotes', () => {
|
||||
it('should return list of remotes', async () => {
|
||||
const mockRemotes = ['backup-s3', 'drive-storage'];
|
||||
mockGot.post.mockResolvedValue({
|
||||
body: { remotes: mockRemotes },
|
||||
});
|
||||
|
||||
const result = await service.listRemotes();
|
||||
|
||||
expect(result).toEqual(mockRemotes);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/listremotes',
|
||||
expect.objectContaining({
|
||||
json: {},
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('should return empty array when no remotes', async () => {
|
||||
mockGot.post.mockResolvedValue({ body: {} });
|
||||
|
||||
const result = await service.listRemotes();
|
||||
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getRemoteDetails', () => {
|
||||
it('should return remote details', async () => {
|
||||
const input: GetRCloneRemoteDetailsDto = { name: 'test-remote' };
|
||||
const mockConfig = { type: 's3', provider: 'AWS' };
|
||||
mockGot.post.mockResolvedValue({ body: mockConfig });
|
||||
|
||||
const result = await service.getRemoteDetails(input);
|
||||
|
||||
expect(result).toEqual(mockConfig);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/get',
|
||||
expect.objectContaining({
|
||||
json: { name: 'test-remote' },
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getRemoteConfig', () => {
|
||||
it('should return remote configuration', async () => {
|
||||
const input: GetRCloneRemoteConfigDto = { name: 'test-remote' };
|
||||
const mockConfig = { type: 's3', access_key_id: 'AKIA...' };
|
||||
mockGot.post.mockResolvedValue({ body: mockConfig });
|
||||
|
||||
const result = await service.getRemoteConfig(input);
|
||||
|
||||
expect(result).toEqual(mockConfig);
|
||||
});
|
||||
});
|
||||
|
||||
describe('createRemote', () => {
|
||||
it('should create a new remote', async () => {
|
||||
const input: CreateRCloneRemoteDto = {
|
||||
name: 'new-remote',
|
||||
type: 's3',
|
||||
parameters: { access_key_id: 'AKIA...', secret_access_key: 'secret' },
|
||||
};
|
||||
const mockResponse = { success: true };
|
||||
mockGot.post.mockResolvedValue({ body: mockResponse });
|
||||
|
||||
const result = await service.createRemote(input);
|
||||
|
||||
expect(result).toEqual(mockResponse);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/create',
|
||||
expect.objectContaining({
|
||||
json: {
|
||||
name: 'new-remote',
|
||||
type: 's3',
|
||||
parameters: { access_key_id: 'AKIA...', secret_access_key: 'secret' },
|
||||
},
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateRemote', () => {
|
||||
it('should update an existing remote', async () => {
|
||||
const input: UpdateRCloneRemoteDto = {
|
||||
name: 'existing-remote',
|
||||
parameters: { access_key_id: 'NEW_AKIA...' },
|
||||
};
|
||||
const mockResponse = { success: true };
|
||||
mockGot.post.mockResolvedValue({ body: mockResponse });
|
||||
|
||||
const result = await service.updateRemote(input);
|
||||
|
||||
expect(result).toEqual(mockResponse);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/update',
|
||||
expect.objectContaining({
|
||||
json: {
|
||||
name: 'existing-remote',
|
||||
access_key_id: 'NEW_AKIA...',
|
||||
},
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteRemote', () => {
|
||||
it('should delete a remote', async () => {
|
||||
const input: DeleteRCloneRemoteDto = { name: 'remote-to-delete' };
|
||||
const mockResponse = { success: true };
|
||||
mockGot.post.mockResolvedValue({ body: mockResponse });
|
||||
|
||||
const result = await service.deleteRemote(input);
|
||||
|
||||
expect(result).toEqual(mockResponse);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/config/delete',
|
||||
expect.objectContaining({
|
||||
json: { name: 'remote-to-delete' },
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('startBackup', () => {
|
||||
it('should start a backup operation', async () => {
|
||||
const input: RCloneStartBackupInput = {
|
||||
srcPath: '/source/path',
|
||||
dstPath: 'remote:backup/path',
|
||||
options: { delete_on: 'dst' },
|
||||
};
|
||||
const mockResponse = { jobid: 'job-123' };
|
||||
mockGot.post.mockResolvedValue({ body: mockResponse });
|
||||
|
||||
const result = await service.startBackup(input);
|
||||
|
||||
expect(result).toEqual(mockResponse);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/sync/copy',
|
||||
expect.objectContaining({
|
||||
json: {
|
||||
srcFs: '/source/path',
|
||||
dstFs: 'remote:backup/path',
|
||||
delete_on: 'dst',
|
||||
},
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getJobStatus', () => {
|
||||
it('should return job status', async () => {
|
||||
const input: GetRCloneJobStatusDto = { jobId: 'job-123' };
|
||||
const mockStatus = { status: 'running', progress: 0.5 };
|
||||
mockGot.post.mockResolvedValue({ body: mockStatus });
|
||||
|
||||
const result = await service.getJobStatus(input);
|
||||
|
||||
expect(result).toEqual(mockStatus);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/job/status',
|
||||
expect.objectContaining({
|
||||
json: { jobid: 'job-123' },
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('listRunningJobs', () => {
|
||||
it('should return list of running jobs', async () => {
|
||||
const mockJobs = [
|
||||
{ id: 'job-1', status: 'running' },
|
||||
{ id: 'job-2', status: 'finished' },
|
||||
];
|
||||
mockGot.post.mockResolvedValue({ body: mockJobs });
|
||||
|
||||
const result = await service.listRunningJobs();
|
||||
|
||||
expect(result).toEqual(mockJobs);
|
||||
expect(mockGot.post).toHaveBeenCalledWith(
|
||||
'http://unix:/tmp/rclone.sock:/job/list',
|
||||
expect.objectContaining({
|
||||
json: {},
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('error handling', () => {
|
||||
it('should handle HTTP errors with detailed messages', async () => {
|
||||
const httpError = {
|
||||
name: 'HTTPError',
|
||||
message: 'Request failed',
|
||||
response: {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({ error: 'Internal server error' }),
|
||||
},
|
||||
};
|
||||
Object.setPrototypeOf(httpError, HTTPError.prototype);
|
||||
mockGot.post.mockRejectedValue(httpError);
|
||||
|
||||
await expect(service.getProviders()).rejects.toThrow(
|
||||
'Rclone API Error (config/providers, HTTP 500): Rclone Error: Internal server error'
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle HTTP errors with empty response body', async () => {
|
||||
const httpError = {
|
||||
name: 'HTTPError',
|
||||
message: 'Request failed',
|
||||
response: {
|
||||
statusCode: 404,
|
||||
body: '',
|
||||
},
|
||||
};
|
||||
Object.setPrototypeOf(httpError, HTTPError.prototype);
|
||||
mockGot.post.mockRejectedValue(httpError);
|
||||
|
||||
await expect(service.getProviders()).rejects.toThrow(
|
||||
'Rclone API Error (config/providers, HTTP 404): Failed to process error response body. Raw body:'
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle HTTP errors with malformed JSON', async () => {
|
||||
const httpError = {
|
||||
name: 'HTTPError',
|
||||
message: 'Request failed',
|
||||
response: {
|
||||
statusCode: 400,
|
||||
body: 'invalid json',
|
||||
},
|
||||
};
|
||||
Object.setPrototypeOf(httpError, HTTPError.prototype);
|
||||
mockGot.post.mockRejectedValue(httpError);
|
||||
|
||||
await expect(service.getProviders()).rejects.toThrow(
|
||||
'Rclone API Error (config/providers, HTTP 400): Failed to process error response body. Raw body: invalid json'
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle non-HTTP errors', async () => {
|
||||
const networkError = new Error('Network connection failed');
|
||||
mockGot.post.mockRejectedValue(networkError);
|
||||
|
||||
await expect(service.getProviders()).rejects.toThrow('Network connection failed');
|
||||
});
|
||||
|
||||
it('should handle unknown errors', async () => {
|
||||
mockGot.post.mockRejectedValue('unknown error');
|
||||
|
||||
await expect(service.getProviders()).rejects.toThrow(
|
||||
'Unknown error calling RClone API (config/providers) with params {}: unknown error'
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,227 +0,0 @@
|
||||
import { expect, test, vi } from 'vitest';
|
||||
|
||||
import type { NginxUrlFields } from '@app/graphql/resolvers/subscription/network.js';
|
||||
import { type Nginx } from '@app/core/types/states/nginx.js';
|
||||
import {
|
||||
getServerIps,
|
||||
getUrlForField,
|
||||
getUrlForServer,
|
||||
} from '@app/graphql/resolvers/subscription/network.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { loadConfigFile } from '@app/store/modules/config.js';
|
||||
import { loadStateFiles } from '@app/store/modules/emhttp.js';
|
||||
import { URL_TYPE } from '@app/unraid-api/graph/resolvers/connect/connect.model.js';
|
||||
|
||||
test.each([
|
||||
[{ httpPort: 80, httpsPort: 443, url: 'my-default-url.com' }],
|
||||
[{ httpPort: 123, httpsPort: 443, url: 'my-default-url.com' }],
|
||||
[{ httpPort: 80, httpsPort: 12_345, url: 'my-default-url.com' }],
|
||||
[{ httpPort: 212, httpsPort: 3_233, url: 'my-default-url.com' }],
|
||||
[{ httpPort: 80, httpsPort: 443, url: 'https://BROKEN_URL' }],
|
||||
])('getUrlForField', ({ httpPort, httpsPort, url }) => {
|
||||
const responseInsecure = getUrlForField({
|
||||
port: httpPort,
|
||||
url,
|
||||
});
|
||||
|
||||
const responseSecure = getUrlForField({
|
||||
portSsl: httpsPort,
|
||||
url,
|
||||
});
|
||||
if (httpPort === 80) {
|
||||
expect(responseInsecure.port).toBe('');
|
||||
} else {
|
||||
expect(responseInsecure.port).toBe(httpPort.toString());
|
||||
}
|
||||
|
||||
if (httpsPort === 443) {
|
||||
expect(responseSecure.port).toBe('');
|
||||
} else {
|
||||
expect(responseSecure.port).toBe(httpsPort.toString());
|
||||
}
|
||||
});
|
||||
|
||||
test('getUrlForServer - field exists, ssl disabled', () => {
|
||||
const result = getUrlForServer({
|
||||
nginx: {
|
||||
lanIp: '192.168.1.1',
|
||||
sslEnabled: false,
|
||||
httpPort: 123,
|
||||
httpsPort: 445,
|
||||
} as const as Nginx,
|
||||
field: 'lanIp',
|
||||
});
|
||||
expect(result).toMatchInlineSnapshot('"http://192.168.1.1:123/"');
|
||||
});
|
||||
|
||||
test('getUrlForServer - field exists, ssl yes', () => {
|
||||
const result = getUrlForServer({
|
||||
nginx: {
|
||||
lanIp: '192.168.1.1',
|
||||
sslEnabled: true,
|
||||
sslMode: 'yes',
|
||||
httpPort: 123,
|
||||
httpsPort: 445,
|
||||
} as const as Nginx,
|
||||
field: 'lanIp',
|
||||
});
|
||||
expect(result).toMatchInlineSnapshot('"https://192.168.1.1:445/"');
|
||||
});
|
||||
|
||||
test('getUrlForServer - field exists, ssl yes, port empty', () => {
|
||||
const result = getUrlForServer({
|
||||
nginx: {
|
||||
lanIp: '192.168.1.1',
|
||||
sslEnabled: true,
|
||||
sslMode: 'yes',
|
||||
httpPort: 80,
|
||||
httpsPort: 443,
|
||||
} as const as Nginx,
|
||||
field: 'lanIp',
|
||||
});
|
||||
expect(result).toMatchInlineSnapshot('"https://192.168.1.1/"');
|
||||
});
|
||||
|
||||
test('getUrlForServer - field exists, ssl auto', async () => {
|
||||
const getResult = async () =>
|
||||
getUrlForServer({
|
||||
nginx: {
|
||||
lanIp: '192.168.1.1',
|
||||
sslEnabled: true,
|
||||
sslMode: 'auto',
|
||||
httpPort: 123,
|
||||
httpsPort: 445,
|
||||
} as const as Nginx,
|
||||
field: 'lanIp',
|
||||
});
|
||||
await expect(getResult).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`[Error: Cannot get IP Based URL for field: "lanIp" SSL mode auto]`
|
||||
);
|
||||
});
|
||||
|
||||
test('getUrlForServer - field does not exist, ssl disabled', async () => {
|
||||
const getResult = async () =>
|
||||
getUrlForServer({
|
||||
nginx: { lanIp: '192.168.1.1', sslEnabled: false, sslMode: 'no' } as const as Nginx,
|
||||
ports: {
|
||||
port: ':123',
|
||||
portSsl: ':445',
|
||||
defaultUrl: new URL('https://my-default-url.unraid.net'),
|
||||
},
|
||||
// @ts-expect-error Field doesn't exist
|
||||
field: 'idontexist',
|
||||
});
|
||||
await expect(getResult).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`[Error: IP URL Resolver: Could not resolve any access URL for field: "idontexist", is FQDN?: false]`
|
||||
);
|
||||
});
|
||||
|
||||
test('getUrlForServer - FQDN - field exists, port non-empty', () => {
|
||||
const result = getUrlForServer({
|
||||
nginx: { lanFqdn: 'my-fqdn.unraid.net', httpsPort: 445 } as unknown as Nginx,
|
||||
field: 'lanFqdn' as NginxUrlFields,
|
||||
});
|
||||
expect(result).toMatchInlineSnapshot('"https://my-fqdn.unraid.net:445/"');
|
||||
});
|
||||
|
||||
test('getUrlForServer - FQDN - field exists, port empty', () => {
|
||||
const result = getUrlForServer({
|
||||
nginx: { lanFqdn: 'my-fqdn.unraid.net', httpPort: 80, httpsPort: 443 } as unknown as Nginx,
|
||||
field: 'lanFqdn' as NginxUrlFields,
|
||||
});
|
||||
expect(result).toMatchInlineSnapshot('"https://my-fqdn.unraid.net/"');
|
||||
});
|
||||
|
||||
test.each([
|
||||
[
|
||||
{
|
||||
nginx: {
|
||||
lanFqdn: 'my-fqdn.unraid.net',
|
||||
sslEnabled: false,
|
||||
sslMode: 'no',
|
||||
httpPort: 80,
|
||||
httpsPort: 443,
|
||||
} as unknown as Nginx,
|
||||
field: 'lanFqdn' as NginxUrlFields,
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
nginx: {
|
||||
wanFqdn: 'my-fqdn.unraid.net',
|
||||
sslEnabled: true,
|
||||
sslMode: 'yes',
|
||||
httpPort: 80,
|
||||
httpsPort: 443,
|
||||
} as unknown as Nginx,
|
||||
field: 'wanFqdn' as NginxUrlFields,
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
nginx: {
|
||||
wanFqdn6: 'my-fqdn.unraid.net',
|
||||
sslEnabled: true,
|
||||
sslMode: 'auto',
|
||||
httpPort: 80,
|
||||
httpsPort: 443,
|
||||
} as unknown as Nginx,
|
||||
field: 'wanFqdn6' as NginxUrlFields,
|
||||
},
|
||||
],
|
||||
])('getUrlForServer - FQDN', ({ nginx, field }) => {
|
||||
const result = getUrlForServer({ nginx, field });
|
||||
expect(result.toString()).toBe('https://my-fqdn.unraid.net/');
|
||||
});
|
||||
|
||||
test('getUrlForServer - field does not exist, ssl disabled', async () => {
|
||||
const getResult = async () =>
|
||||
getUrlForServer({
|
||||
nginx: { lanFqdn: 'my-fqdn.unraid.net' } as unknown as Nginx,
|
||||
ports: { portSsl: '', port: '', defaultUrl: new URL('https://my-default-url.unraid.net') },
|
||||
// @ts-expect-error Field doesn't exist
|
||||
field: 'idontexist',
|
||||
});
|
||||
await expect(getResult).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`[Error: IP URL Resolver: Could not resolve any access URL for field: "idontexist", is FQDN?: false]`
|
||||
);
|
||||
});
|
||||
|
||||
test('integration test, loading nginx ini and generating all URLs', async () => {
|
||||
await store.dispatch(loadStateFiles());
|
||||
await store.dispatch(loadConfigFile());
|
||||
|
||||
// Instead of mocking the getServerIps function, we'll use the actual function
|
||||
// and verify the structure of the returned URLs
|
||||
const urls = getServerIps();
|
||||
|
||||
// Verify that we have URLs
|
||||
expect(urls.urls.length).toBeGreaterThan(0);
|
||||
expect(urls.errors.length).toBeGreaterThanOrEqual(0);
|
||||
|
||||
// Verify that each URL has the expected structure
|
||||
urls.urls.forEach((url) => {
|
||||
expect(url).toHaveProperty('ipv4');
|
||||
expect(url).toHaveProperty('name');
|
||||
expect(url).toHaveProperty('type');
|
||||
|
||||
// Verify that the URL matches the expected pattern based on its type
|
||||
if (url.type === URL_TYPE.DEFAULT) {
|
||||
expect(url.ipv4?.toString()).toMatch(/^https:\/\/.*:\d+\/$/);
|
||||
expect(url.ipv6?.toString()).toMatch(/^https:\/\/.*:\d+\/$/);
|
||||
} else if (url.type === URL_TYPE.LAN) {
|
||||
expect(url.ipv4?.toString()).toMatch(/^https:\/\/.*:\d+\/$/);
|
||||
} else if (url.type === URL_TYPE.MDNS) {
|
||||
expect(url.ipv4?.toString()).toMatch(/^https:\/\/.*:\d+\/$/);
|
||||
} else if (url.type === URL_TYPE.WIREGUARD) {
|
||||
expect(url.ipv4?.toString()).toMatch(/^https:\/\/.*:\d+\/$/);
|
||||
}
|
||||
});
|
||||
|
||||
// Verify that the error message contains the expected text
|
||||
if (urls.errors.length > 0) {
|
||||
expect(urls.errors[0].message).toContain(
|
||||
'IP URL Resolver: Could not resolve any access URL for field:'
|
||||
);
|
||||
}
|
||||
});
|
||||
@@ -7,6 +7,7 @@ exports[`Returns paths 1`] = `
|
||||
"unraid-data",
|
||||
"docker-autostart",
|
||||
"docker-socket",
|
||||
"rclone-socket",
|
||||
"parity-checks",
|
||||
"htpasswd",
|
||||
"emhttpd-socket",
|
||||
@@ -16,7 +17,6 @@ exports[`Returns paths 1`] = `
|
||||
"myservers-base",
|
||||
"myservers-config",
|
||||
"myservers-config-states",
|
||||
"myservers-env",
|
||||
"myservers-keepalive",
|
||||
"keyfile-base",
|
||||
"machine-id",
|
||||
|
||||
@@ -1,310 +0,0 @@
|
||||
import { beforeEach, expect, test, vi } from 'vitest';
|
||||
|
||||
import { pubsub, PUBSUB_CHANNEL } from '@app/core/pubsub.js';
|
||||
import { GraphQLClient } from '@app/mothership/graphql-client.js';
|
||||
import { stopPingTimeoutJobs } from '@app/mothership/jobs/ping-timeout-jobs.js';
|
||||
import { setGraphqlConnectionStatus } from '@app/store/actions/set-minigraph-status.js';
|
||||
import { setupRemoteAccessThunk } from '@app/store/actions/setup-remote-access.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { MyServersConfigMemory } from '@app/types/my-servers-config.js';
|
||||
import { MinigraphStatus } from '@app/unraid-api/graph/resolvers/cloud/cloud.model.js';
|
||||
import {
|
||||
WAN_ACCESS_TYPE,
|
||||
WAN_FORWARD_TYPE,
|
||||
} from '@app/unraid-api/graph/resolvers/connect/connect.model.js';
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('@app/core/pubsub.js', () => {
|
||||
const mockPublish = vi.fn();
|
||||
return {
|
||||
pubsub: {
|
||||
publish: mockPublish,
|
||||
},
|
||||
PUBSUB_CHANNEL: {
|
||||
OWNER: 'OWNER',
|
||||
SERVERS: 'SERVERS',
|
||||
},
|
||||
__esModule: true,
|
||||
default: {
|
||||
pubsub: {
|
||||
publish: mockPublish,
|
||||
},
|
||||
PUBSUB_CHANNEL: {
|
||||
OWNER: 'OWNER',
|
||||
SERVERS: 'SERVERS',
|
||||
},
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
// Get the mock function for pubsub.publish
|
||||
const mockPublish = vi.mocked(pubsub.publish);
|
||||
|
||||
// Clear mock before each test
|
||||
beforeEach(() => {
|
||||
mockPublish.mockClear();
|
||||
});
|
||||
|
||||
vi.mock('@app/mothership/graphql-client.js', () => ({
|
||||
GraphQLClient: {
|
||||
clearInstance: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('@app/mothership/jobs/ping-timeout-jobs.js', () => ({
|
||||
stopPingTimeoutJobs: vi.fn(),
|
||||
}));
|
||||
|
||||
const createConfigMatcher = (specificValues: Partial<MyServersConfigMemory> = {}) => {
|
||||
const defaultMatcher = {
|
||||
api: expect.objectContaining({
|
||||
extraOrigins: expect.any(String),
|
||||
version: expect.any(String),
|
||||
}),
|
||||
connectionStatus: expect.objectContaining({
|
||||
minigraph: expect.any(String),
|
||||
upnpStatus: expect.any(String),
|
||||
}),
|
||||
local: expect.objectContaining({
|
||||
sandbox: expect.any(String),
|
||||
}),
|
||||
nodeEnv: expect.any(String),
|
||||
remote: expect.objectContaining({
|
||||
accesstoken: expect.any(String),
|
||||
allowedOrigins: expect.any(String),
|
||||
apikey: expect.any(String),
|
||||
avatar: expect.any(String),
|
||||
dynamicRemoteAccessType: expect.any(String),
|
||||
email: expect.any(String),
|
||||
idtoken: expect.any(String),
|
||||
localApiKey: expect.any(String),
|
||||
refreshtoken: expect.any(String),
|
||||
regWizTime: expect.any(String),
|
||||
ssoSubIds: expect.any(String),
|
||||
upnpEnabled: expect.any(String),
|
||||
username: expect.any(String),
|
||||
wanaccess: expect.any(String),
|
||||
wanport: expect.any(String),
|
||||
}),
|
||||
status: expect.any(String),
|
||||
};
|
||||
|
||||
return expect.objectContaining({
|
||||
...defaultMatcher,
|
||||
...specificValues,
|
||||
});
|
||||
};
|
||||
|
||||
test('Before init returns default values for all fields', async () => {
|
||||
const state = store.getState().config;
|
||||
expect(state).toMatchSnapshot();
|
||||
}, 10_000);
|
||||
|
||||
test('After init returns values from cfg file for all fields', async () => {
|
||||
const { loadConfigFile } = await import('@app/store/modules/config.js');
|
||||
|
||||
// Load cfg into store
|
||||
await store.dispatch(loadConfigFile());
|
||||
|
||||
// Check if store has cfg contents loaded
|
||||
const state = store.getState().config;
|
||||
expect(state).toMatchObject(createConfigMatcher());
|
||||
});
|
||||
|
||||
test('updateUserConfig merges in changes to current state', async () => {
|
||||
const { loadConfigFile, updateUserConfig } = await import('@app/store/modules/config.js');
|
||||
|
||||
// Load cfg into store
|
||||
await store.dispatch(loadConfigFile());
|
||||
|
||||
// Update store
|
||||
store.dispatch(
|
||||
updateUserConfig({
|
||||
remote: { avatar: 'https://via.placeholder.com/200' },
|
||||
})
|
||||
);
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state).toMatchObject(
|
||||
createConfigMatcher({
|
||||
remote: expect.objectContaining({
|
||||
avatar: 'https://via.placeholder.com/200',
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
test('loginUser updates state and publishes to pubsub', async () => {
|
||||
const { loginUser } = await import('@app/store/modules/config.js');
|
||||
const userInfo = {
|
||||
email: 'test@example.com',
|
||||
avatar: 'https://via.placeholder.com/200',
|
||||
username: 'testuser',
|
||||
apikey: 'test-api-key',
|
||||
localApiKey: 'test-local-api-key',
|
||||
};
|
||||
|
||||
await store.dispatch(loginUser(userInfo));
|
||||
|
||||
expect(pubsub.publish).toHaveBeenCalledWith(PUBSUB_CHANNEL.OWNER, {
|
||||
owner: {
|
||||
username: userInfo.username,
|
||||
url: '',
|
||||
avatar: userInfo.avatar,
|
||||
},
|
||||
});
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state).toMatchObject(
|
||||
createConfigMatcher({
|
||||
remote: expect.objectContaining(userInfo),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
test('logoutUser clears state and publishes to pubsub', async () => {
|
||||
const { logoutUser } = await import('@app/store/modules/config.js');
|
||||
|
||||
await store.dispatch(logoutUser({ reason: 'test logout' }));
|
||||
|
||||
expect(pubsub.publish).toHaveBeenCalledWith(PUBSUB_CHANNEL.SERVERS, { servers: [] });
|
||||
expect(pubsub.publish).toHaveBeenCalledWith(PUBSUB_CHANNEL.OWNER, {
|
||||
owner: {
|
||||
username: 'root',
|
||||
url: '',
|
||||
avatar: '',
|
||||
},
|
||||
});
|
||||
expect(stopPingTimeoutJobs).toHaveBeenCalled();
|
||||
expect(GraphQLClient.clearInstance).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('updateAccessTokens updates token fields', async () => {
|
||||
const { updateAccessTokens } = await import('@app/store/modules/config.js');
|
||||
const tokens = {
|
||||
accesstoken: 'new-access-token',
|
||||
refreshtoken: 'new-refresh-token',
|
||||
idtoken: 'new-id-token',
|
||||
};
|
||||
|
||||
store.dispatch(updateAccessTokens(tokens));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state).toMatchObject(
|
||||
createConfigMatcher({
|
||||
remote: expect.objectContaining(tokens),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
test('updateAllowedOrigins updates extraOrigins', async () => {
|
||||
const { updateAllowedOrigins } = await import('@app/store/modules/config.js');
|
||||
const origins = ['https://test1.com', 'https://test2.com'];
|
||||
|
||||
store.dispatch(updateAllowedOrigins(origins));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.api.extraOrigins).toBe(origins.join(', '));
|
||||
});
|
||||
|
||||
test('setUpnpState updates upnp settings', async () => {
|
||||
const { setUpnpState } = await import('@app/store/modules/config.js');
|
||||
|
||||
store.dispatch(setUpnpState({ enabled: 'yes', status: 'active' }));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.remote.upnpEnabled).toBe('yes');
|
||||
expect(state.connectionStatus.upnpStatus).toBe('active');
|
||||
});
|
||||
|
||||
test('setWanPortToValue updates wanport', async () => {
|
||||
const { setWanPortToValue } = await import('@app/store/modules/config.js');
|
||||
|
||||
store.dispatch(setWanPortToValue(8443));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.remote.wanport).toBe('8443');
|
||||
});
|
||||
|
||||
test('setWanAccess updates wanaccess', async () => {
|
||||
const { setWanAccess } = await import('@app/store/modules/config.js');
|
||||
|
||||
store.dispatch(setWanAccess('yes'));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.remote.wanaccess).toBe('yes');
|
||||
});
|
||||
|
||||
test('addSsoUser adds user to ssoSubIds', async () => {
|
||||
const { addSsoUser } = await import('@app/store/modules/config.js');
|
||||
|
||||
store.dispatch(addSsoUser('user1'));
|
||||
store.dispatch(addSsoUser('user2'));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.remote.ssoSubIds).toBe('user1,user2');
|
||||
});
|
||||
|
||||
test('removeSsoUser removes user from ssoSubIds', async () => {
|
||||
const { addSsoUser, removeSsoUser } = await import('@app/store/modules/config.js');
|
||||
|
||||
store.dispatch(addSsoUser('user1'));
|
||||
store.dispatch(addSsoUser('user2'));
|
||||
store.dispatch(removeSsoUser('user1'));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.remote.ssoSubIds).toBe('user2');
|
||||
});
|
||||
|
||||
test('removeSsoUser with null clears all ssoSubIds', async () => {
|
||||
const { addSsoUser, removeSsoUser } = await import('@app/store/modules/config.js');
|
||||
|
||||
store.dispatch(addSsoUser('user1'));
|
||||
store.dispatch(addSsoUser('user2'));
|
||||
store.dispatch(removeSsoUser(null));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.remote.ssoSubIds).toBe('');
|
||||
});
|
||||
|
||||
test('setLocalApiKey updates localApiKey', async () => {
|
||||
const { setLocalApiKey } = await import('@app/store/modules/config.js');
|
||||
|
||||
store.dispatch(setLocalApiKey('new-local-api-key'));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.remote.localApiKey).toBe('new-local-api-key');
|
||||
});
|
||||
|
||||
test('setLocalApiKey with null clears localApiKey', async () => {
|
||||
const { setLocalApiKey } = await import('@app/store/modules/config.js');
|
||||
|
||||
store.dispatch(setLocalApiKey(null));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.remote.localApiKey).toBe('');
|
||||
});
|
||||
|
||||
test('setGraphqlConnectionStatus updates minigraph status', async () => {
|
||||
store.dispatch(setGraphqlConnectionStatus({ status: MinigraphStatus.CONNECTED, error: null }));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.connectionStatus.minigraph).toBe(MinigraphStatus.CONNECTED);
|
||||
});
|
||||
|
||||
test('setupRemoteAccessThunk.fulfilled updates remote access settings', async () => {
|
||||
const remoteAccessSettings = {
|
||||
accessType: WAN_ACCESS_TYPE.DYNAMIC,
|
||||
forwardType: WAN_FORWARD_TYPE.UPNP,
|
||||
};
|
||||
|
||||
await store.dispatch(setupRemoteAccessThunk(remoteAccessSettings));
|
||||
|
||||
const state = store.getState().config;
|
||||
expect(state.remote).toMatchObject({
|
||||
wanaccess: 'no',
|
||||
dynamicRemoteAccessType: 'UPNP',
|
||||
wanport: '',
|
||||
upnpEnabled: 'yes',
|
||||
});
|
||||
});
|
||||
@@ -24,7 +24,7 @@ test('Before init returns default values for all fields', async () => {
|
||||
`);
|
||||
});
|
||||
|
||||
test('After init returns values from cfg file for all fields', async () => {
|
||||
test('After init returns values from cfg file for all fields', { timeout: 30000 }, async () => {
|
||||
const { loadStateFiles } = await import('@app/store/modules/emhttp.js');
|
||||
|
||||
// Load state files into store
|
||||
|
||||
@@ -24,7 +24,6 @@ test('Returns paths', async () => {
|
||||
'myservers-base': '/boot/config/plugins/dynamix.my.servers/',
|
||||
'myservers-config': expect.stringContaining('api/dev/Unraid.net/myservers.cfg'),
|
||||
'myservers-config-states': expect.stringContaining('api/dev/states/myservers.cfg'),
|
||||
'myservers-env': '/boot/config/plugins/dynamix.my.servers/env',
|
||||
'myservers-keepalive': './dev/Unraid.net/fb_keepalive',
|
||||
'keyfile-base': expect.stringContaining('api/dev/Unraid.net'),
|
||||
'machine-id': expect.stringContaining('api/dev/data/machine-id'),
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
import { expect, test, vi } from 'vitest';
|
||||
|
||||
import { store } from '@app/store/index.js';
|
||||
import { loadStateFiles } from '@app/store/modules/emhttp.js';
|
||||
import { loadRegistrationKey } from '@app/store/modules/registration.js';
|
||||
import { createRegistrationEvent } from '@app/store/sync/registration-sync.js';
|
||||
|
||||
vi.mock('@app/core/pubsub', () => ({
|
||||
pubsub: { publish: vi.fn() },
|
||||
}));
|
||||
|
||||
test('Creates a registration event', async () => {
|
||||
// Load state files into store
|
||||
|
||||
const config = await store.dispatch(loadStateFiles()).unwrap();
|
||||
await store.dispatch(loadRegistrationKey());
|
||||
expect(config.var.regFile).toBe('/app/dev/Unraid.net/Pro.key');
|
||||
|
||||
const state = store.getState();
|
||||
const registrationEvent = createRegistrationEvent(state);
|
||||
expect(registrationEvent).toMatchInlineSnapshot(`
|
||||
{
|
||||
"registration": {
|
||||
"guid": "13FE-4200-C300-58C372A52B19",
|
||||
"keyFile": {
|
||||
"contents": "hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w",
|
||||
"location": "/app/dev/Unraid.net/Pro.key",
|
||||
},
|
||||
"state": "PRO",
|
||||
"type": "PRO",
|
||||
},
|
||||
}
|
||||
`);
|
||||
});
|
||||
@@ -1,20 +0,0 @@
|
||||
import { type Mapping } from '@runonflux/nat-upnp';
|
||||
import { expect, test, vi } from 'vitest';
|
||||
|
||||
import { getWanPortForUpnp } from '@app/upnp/helpers.js';
|
||||
|
||||
test('it successfully gets a wan port given no exclusions', () => {
|
||||
const port = getWanPortForUpnp(null, 36_000, 38_000);
|
||||
expect(port).toBeGreaterThan(35_999);
|
||||
expect(port).toBeLessThan(38_001);
|
||||
});
|
||||
|
||||
test('it fails to get a wan port given exclusions', () => {
|
||||
const port = getWanPortForUpnp([{ public: { port: 36_000 } }] as Mapping[], 36_000, 36_000);
|
||||
expect(port).toBeNull();
|
||||
});
|
||||
|
||||
test('it succeeds in getting a wan port given exclusions', () => {
|
||||
const port = getWanPortForUpnp([{ public: { port: 36_000 } }] as Mapping[], 30_000, 36_000);
|
||||
expect(port).not.toBeNull();
|
||||
});
|
||||
@@ -1,6 +1,6 @@
|
||||
import { describe, expect, it } from 'vitest';
|
||||
|
||||
import { csvStringToArray, formatDatetime } from '@app/utils.js';
|
||||
import { csvStringToArray, formatDatetime, parsePackageArg } from '@app/utils.js';
|
||||
|
||||
describe('formatDatetime', () => {
|
||||
const testDate = new Date('2024-02-14T12:34:56');
|
||||
@@ -103,3 +103,78 @@ describe('csvStringToArray', () => {
|
||||
expect(csvStringToArray(',one,')).toEqual(['one']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('parsePackageArg', () => {
|
||||
it('parses simple package names without version', () => {
|
||||
expect(parsePackageArg('lodash')).toEqual({ name: 'lodash' });
|
||||
expect(parsePackageArg('express')).toEqual({ name: 'express' });
|
||||
expect(parsePackageArg('react')).toEqual({ name: 'react' });
|
||||
});
|
||||
|
||||
it('parses simple package names with version', () => {
|
||||
expect(parsePackageArg('lodash@4.17.21')).toEqual({ name: 'lodash', version: '4.17.21' });
|
||||
expect(parsePackageArg('express@4.18.2')).toEqual({ name: 'express', version: '4.18.2' });
|
||||
expect(parsePackageArg('react@18.2.0')).toEqual({ name: 'react', version: '18.2.0' });
|
||||
});
|
||||
|
||||
it('parses scoped package names without version', () => {
|
||||
expect(parsePackageArg('@types/node')).toEqual({ name: '@types/node' });
|
||||
expect(parsePackageArg('@angular/core')).toEqual({ name: '@angular/core' });
|
||||
expect(parsePackageArg('@nestjs/common')).toEqual({ name: '@nestjs/common' });
|
||||
});
|
||||
|
||||
it('parses scoped package names with version', () => {
|
||||
expect(parsePackageArg('@types/node@18.15.0')).toEqual({
|
||||
name: '@types/node',
|
||||
version: '18.15.0',
|
||||
});
|
||||
expect(parsePackageArg('@angular/core@15.2.0')).toEqual({
|
||||
name: '@angular/core',
|
||||
version: '15.2.0',
|
||||
});
|
||||
expect(parsePackageArg('@nestjs/common@9.3.12')).toEqual({
|
||||
name: '@nestjs/common',
|
||||
version: '9.3.12',
|
||||
});
|
||||
});
|
||||
|
||||
it('handles version ranges and tags', () => {
|
||||
expect(parsePackageArg('lodash@^4.17.0')).toEqual({ name: 'lodash', version: '^4.17.0' });
|
||||
expect(parsePackageArg('react@~18.2.0')).toEqual({ name: 'react', version: '~18.2.0' });
|
||||
expect(parsePackageArg('express@latest')).toEqual({ name: 'express', version: 'latest' });
|
||||
expect(parsePackageArg('vue@beta')).toEqual({ name: 'vue', version: 'beta' });
|
||||
expect(parsePackageArg('@types/node@next')).toEqual({ name: '@types/node', version: 'next' });
|
||||
});
|
||||
|
||||
it('handles multiple @ symbols correctly', () => {
|
||||
expect(parsePackageArg('package@1.0.0@extra')).toEqual({
|
||||
name: 'package@1.0.0',
|
||||
version: 'extra',
|
||||
});
|
||||
expect(parsePackageArg('@scope/pkg@1.0.0@extra')).toEqual({
|
||||
name: '@scope/pkg@1.0.0',
|
||||
version: 'extra',
|
||||
});
|
||||
});
|
||||
|
||||
it('ignores versions that contain forward slashes', () => {
|
||||
expect(parsePackageArg('package@github:user/repo')).toEqual({
|
||||
name: 'package@github:user/repo',
|
||||
});
|
||||
expect(parsePackageArg('@scope/pkg@git+https://github.com/user/repo.git')).toEqual({
|
||||
name: '@scope/pkg@git+https://github.com/user/repo.git',
|
||||
});
|
||||
});
|
||||
|
||||
it('handles edge cases', () => {
|
||||
expect(parsePackageArg('@')).toEqual({ name: '@' });
|
||||
expect(parsePackageArg('@scope')).toEqual({ name: '@scope' });
|
||||
expect(parsePackageArg('package@')).toEqual({ name: 'package@' });
|
||||
expect(parsePackageArg('@scope/pkg@')).toEqual({ name: '@scope/pkg@' });
|
||||
});
|
||||
|
||||
it('handles empty version strings', () => {
|
||||
expect(parsePackageArg('package@')).toEqual({ name: 'package@' });
|
||||
expect(parsePackageArg('@scope/package@')).toEqual({ name: '@scope/package@' });
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,28 +1,37 @@
|
||||
import '@app/dotenv.js';
|
||||
|
||||
import { execa } from 'execa';
|
||||
import { Logger } from '@nestjs/common';
|
||||
|
||||
import { CommandFactory } from 'nest-commander';
|
||||
|
||||
import { internalLogger, logger } from '@app/core/log.js';
|
||||
import { LOG_LEVEL } from '@app/environment.js';
|
||||
import { CliModule } from '@app/unraid-api/cli/cli.module.js';
|
||||
import { LOG_LEVEL, SUPPRESS_LOGS } from '@app/environment.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
|
||||
const getUnraidApiLocation = async () => {
|
||||
const { execa } = await import('execa');
|
||||
try {
|
||||
const shellToUse = await execa('which unraid-api');
|
||||
return shellToUse.stdout.trim();
|
||||
} catch (err) {
|
||||
logger.debug('Could not find unraid-api in PATH, using default location');
|
||||
|
||||
return '/usr/bin/unraid-api';
|
||||
}
|
||||
};
|
||||
|
||||
const getLogger = () => {
|
||||
if (LOG_LEVEL === 'TRACE' && !SUPPRESS_LOGS) {
|
||||
return new LogService();
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
const logger = getLogger();
|
||||
try {
|
||||
await import('json-bigint-patch');
|
||||
const { CliModule } = await import('@app/unraid-api/cli/cli.module.js');
|
||||
|
||||
await CommandFactory.run(CliModule, {
|
||||
cliName: 'unraid-api',
|
||||
logger: LOG_LEVEL === 'TRACE' ? new LogService() : false, // - enable this to see nest initialization issues
|
||||
logger: logger, // - enable this to see nest initialization issues
|
||||
completion: {
|
||||
fig: false,
|
||||
cmd: 'completion-script',
|
||||
@@ -31,10 +40,8 @@ try {
|
||||
});
|
||||
process.exit(0);
|
||||
} catch (error) {
|
||||
logger.error('ERROR:', error);
|
||||
internalLogger.error({
|
||||
message: 'Failed to start unraid-api',
|
||||
error,
|
||||
});
|
||||
if (logger) {
|
||||
logger.error('ERROR:', error);
|
||||
}
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
@@ -1,99 +0,0 @@
|
||||
import { uniq } from 'lodash-es';
|
||||
|
||||
import type { RootState } from '@app/store/index.js';
|
||||
import { logger } from '@app/core/log.js';
|
||||
import { GRAPHQL_INTROSPECTION } from '@app/environment.js';
|
||||
import { getServerIps, getUrlForField } from '@app/graphql/resolvers/subscription/network.js';
|
||||
import { getters, store } from '@app/store/index.js';
|
||||
import { FileLoadStatus } from '@app/store/types.js';
|
||||
|
||||
const getAllowedSocks = (): string[] => [
|
||||
// Notifier bridge
|
||||
'/var/run/unraid-notifications.sock',
|
||||
|
||||
// Unraid PHP scripts
|
||||
'/var/run/unraid-php.sock',
|
||||
|
||||
// CLI
|
||||
'/var/run/unraid-cli.sock',
|
||||
];
|
||||
|
||||
const getLocalAccessUrlsForServer = (state: RootState = store.getState()): string[] => {
|
||||
const { emhttp } = state;
|
||||
|
||||
if (emhttp.status !== FileLoadStatus.LOADED) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const { nginx } = emhttp;
|
||||
try {
|
||||
return [
|
||||
getUrlForField({
|
||||
url: 'localhost',
|
||||
port: nginx.httpPort,
|
||||
}).toString(),
|
||||
getUrlForField({
|
||||
url: 'localhost',
|
||||
portSsl: nginx.httpsPort,
|
||||
}).toString(),
|
||||
];
|
||||
} catch (error: unknown) {
|
||||
logger.debug('Caught error in getLocalAccessUrlsForServer: \n%o', error);
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
const getRemoteAccessUrlsForAllowedOrigins = (state: RootState = store.getState()): string[] => {
|
||||
const { urls } = getServerIps(state);
|
||||
|
||||
if (urls) {
|
||||
return urls.reduce<string[]>((acc, curr) => {
|
||||
if ((curr.ipv4 && curr.ipv6) || curr.ipv4) {
|
||||
acc.push(curr.ipv4.toString());
|
||||
} else if (curr.ipv6) {
|
||||
acc.push(curr.ipv6.toString());
|
||||
}
|
||||
|
||||
return acc;
|
||||
}, []);
|
||||
}
|
||||
|
||||
return [];
|
||||
};
|
||||
|
||||
export const getExtraOrigins = (): string[] => {
|
||||
const { extraOrigins } = getters.config().api;
|
||||
if (extraOrigins) {
|
||||
return extraOrigins
|
||||
.replaceAll(' ', '')
|
||||
.split(',')
|
||||
.filter((origin) => origin.startsWith('http://') || origin.startsWith('https://'));
|
||||
}
|
||||
|
||||
return [];
|
||||
};
|
||||
|
||||
const getConnectOrigins = (): string[] => {
|
||||
const connectMain = 'https://connect.myunraid.net';
|
||||
const connectStaging = 'https://connect-staging.myunraid.net';
|
||||
const connectDev = 'https://dev-my.myunraid.net:4000';
|
||||
|
||||
return [connectMain, connectStaging, connectDev];
|
||||
};
|
||||
|
||||
const getApolloSandbox = (): string[] => {
|
||||
if (GRAPHQL_INTROSPECTION) {
|
||||
return ['https://studio.apollographql.com'];
|
||||
}
|
||||
return [];
|
||||
};
|
||||
|
||||
export const getAllowedOrigins = (state: RootState = store.getState()): string[] =>
|
||||
uniq([
|
||||
...getAllowedSocks(),
|
||||
...getLocalAccessUrlsForServer(state),
|
||||
...getRemoteAccessUrlsForAllowedOrigins(state),
|
||||
...getExtraOrigins(),
|
||||
...getConnectOrigins(),
|
||||
...getApolloSandbox(),
|
||||
]).map((url) => (url.endsWith('/') ? url.slice(0, -1) : url));
|
||||
@@ -1,4 +1,3 @@
|
||||
import { getters } from '@app/store/index.js';
|
||||
import { FileLoadStatus } from '@app/store/types.js';
|
||||
|
||||
/**
|
||||
@@ -6,6 +5,7 @@ import { FileLoadStatus } from '@app/store/types.js';
|
||||
* @returns The current version.
|
||||
*/
|
||||
export const getUnraidVersion = async (): Promise<string> => {
|
||||
const { getters } = await import('@app/store/index.js');
|
||||
const { status, var: emhttpVar } = getters.emhttp();
|
||||
if (status === FileLoadStatus.LOADED) {
|
||||
return emhttpVar.version;
|
||||
|
||||
@@ -79,6 +79,3 @@ export const KEYSERVER_VALIDATION_ENDPOINT = 'https://keys.lime-technology.com/v
|
||||
|
||||
/** Set the max retries for the GraphQL Client */
|
||||
export const MAX_RETRIES_FOR_LINEAR_BACKOFF = 100;
|
||||
|
||||
export const PM2_PATH = join(import.meta.dirname, '../../', 'node_modules', 'pm2', 'bin', 'pm2');
|
||||
export const ECOSYSTEM_PATH = join(import.meta.dirname, '../../', 'ecosystem.config.json');
|
||||
|
||||
@@ -1,36 +1,66 @@
|
||||
import { pino } from 'pino';
|
||||
import pretty from 'pino-pretty';
|
||||
|
||||
import { LOG_TYPE } from '@app/environment.js';
|
||||
import { API_VERSION, LOG_LEVEL, LOG_TYPE, PATHS_LOGS_FILE, SUPPRESS_LOGS } from '@app/environment.js';
|
||||
|
||||
export const levels = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'] as const;
|
||||
|
||||
export type LogLevel = (typeof levels)[number];
|
||||
|
||||
const level =
|
||||
levels[levels.indexOf(process.env.LOG_LEVEL?.toLowerCase() as (typeof levels)[number])] ?? 'info';
|
||||
const level = levels[levels.indexOf(LOG_LEVEL.toLowerCase() as LogLevel)] ?? 'info';
|
||||
|
||||
export const logDestination = pino.destination({
|
||||
const nullDestination = pino.destination({
|
||||
write() {
|
||||
// Suppress all logs
|
||||
},
|
||||
});
|
||||
|
||||
export const logDestination =
|
||||
process.env.SUPPRESS_LOGS === 'true' ? nullDestination : pino.destination();
|
||||
const localFileDestination = pino.destination({
|
||||
dest: PATHS_LOGS_FILE,
|
||||
sync: true,
|
||||
});
|
||||
|
||||
const stream =
|
||||
LOG_TYPE === 'pretty'
|
||||
? pretty({
|
||||
singleLine: true,
|
||||
hideObject: false,
|
||||
colorize: true,
|
||||
ignore: 'hostname,pid',
|
||||
destination: logDestination,
|
||||
})
|
||||
: logDestination;
|
||||
const stream = SUPPRESS_LOGS
|
||||
? nullDestination
|
||||
: LOG_TYPE === 'pretty'
|
||||
? pretty({
|
||||
singleLine: true,
|
||||
hideObject: false,
|
||||
colorize: true,
|
||||
ignore: 'hostname,pid',
|
||||
destination: logDestination,
|
||||
})
|
||||
: logDestination;
|
||||
|
||||
export const logger = pino(
|
||||
{
|
||||
level,
|
||||
timestamp: () => `,"time":"${new Date().toISOString()}"`,
|
||||
timestamp: pino.stdTimeFunctions.isoTime,
|
||||
formatters: {
|
||||
level: (label: string) => ({ level: label }),
|
||||
bindings: (bindings) => ({ ...bindings, apiVersion: API_VERSION }),
|
||||
},
|
||||
redact: {
|
||||
paths: [
|
||||
'*.password',
|
||||
'*.pass',
|
||||
'*.secret',
|
||||
'*.token',
|
||||
'*.key',
|
||||
'*.Password',
|
||||
'*.Pass',
|
||||
'*.Secret',
|
||||
'*.Token',
|
||||
'*.Key',
|
||||
'*.apikey',
|
||||
'*.localApiKey',
|
||||
'*.accesstoken',
|
||||
'*.idtoken',
|
||||
'*.refreshtoken',
|
||||
],
|
||||
censor: '***REDACTED***',
|
||||
},
|
||||
},
|
||||
stream
|
||||
@@ -52,6 +82,7 @@ export const keyServerLogger = logger.child({ logger: 'key-server' });
|
||||
export const remoteAccessLogger = logger.child({ logger: 'remote-access' });
|
||||
export const remoteQueryLogger = logger.child({ logger: 'remote-query' });
|
||||
export const apiLogger = logger.child({ logger: 'api' });
|
||||
export const pluginLogger = logger.child({ logger: 'plugin', stream: localFileDestination });
|
||||
|
||||
export const loggers = [
|
||||
internalLogger,
|
||||
@@ -71,3 +102,19 @@ export const loggers = [
|
||||
remoteQueryLogger,
|
||||
apiLogger,
|
||||
];
|
||||
|
||||
export function sanitizeParams(params: Record<string, any>): Record<string, any> {
|
||||
const SENSITIVE_KEYS = ['password', 'secret', 'token', 'key', 'client_secret'];
|
||||
const mask = (value: any) => (typeof value === 'string' && value.length > 0 ? '***' : value);
|
||||
const sanitized: Record<string, any> = {};
|
||||
for (const k in params) {
|
||||
if (SENSITIVE_KEYS.some((s) => k.toLowerCase().includes(s))) {
|
||||
sanitized[k] = mask(params[k]);
|
||||
} else if (typeof params[k] === 'object' && params[k] !== null && !Array.isArray(params[k])) {
|
||||
sanitized[k] = sanitizeParams(params[k]);
|
||||
} else {
|
||||
sanitized[k] = params[k];
|
||||
}
|
||||
}
|
||||
return sanitized;
|
||||
}
|
||||
|
||||
@@ -1,26 +1,13 @@
|
||||
import EventEmitter from 'events';
|
||||
|
||||
import { GRAPHQL_PUBSUB_CHANNEL } from '@unraid/shared/pubsub/graphql.pubsub.js';
|
||||
import { PubSub } from 'graphql-subscriptions';
|
||||
|
||||
// Allow subscriptions to have 30 connections
|
||||
const eventEmitter = new EventEmitter();
|
||||
eventEmitter.setMaxListeners(30);
|
||||
|
||||
export enum PUBSUB_CHANNEL {
|
||||
ARRAY = 'ARRAY',
|
||||
DASHBOARD = 'DASHBOARD',
|
||||
DISPLAY = 'DISPLAY',
|
||||
INFO = 'INFO',
|
||||
NOTIFICATION = 'NOTIFICATION',
|
||||
NOTIFICATION_ADDED = 'NOTIFICATION_ADDED',
|
||||
NOTIFICATION_OVERVIEW = 'NOTIFICATION_OVERVIEW',
|
||||
OWNER = 'OWNER',
|
||||
SERVERS = 'SERVERS',
|
||||
VMS = 'VMS',
|
||||
REGISTRATION = 'REGISTRATION',
|
||||
LOG_FILE = 'LOG_FILE',
|
||||
PARITY = 'PARITY',
|
||||
}
|
||||
export { GRAPHQL_PUBSUB_CHANNEL as PUBSUB_CHANNEL };
|
||||
|
||||
export const pubsub = new PubSub({ eventEmitter });
|
||||
|
||||
@@ -28,6 +15,6 @@ export const pubsub = new PubSub({ eventEmitter });
|
||||
* Create a pubsub subscription.
|
||||
* @param channel The pubsub channel to subscribe to.
|
||||
*/
|
||||
export const createSubscription = (channel: PUBSUB_CHANNEL) => {
|
||||
export const createSubscription = (channel: GRAPHQL_PUBSUB_CHANNEL) => {
|
||||
return pubsub.asyncIterableIterator(channel);
|
||||
};
|
||||
|
||||
@@ -1,40 +0,0 @@
|
||||
import { isEqual, merge } from 'lodash-es';
|
||||
|
||||
import { getAllowedOrigins } from '@app/common/allowed-origins.js';
|
||||
import { initialState } from '@app/store/modules/config.js';
|
||||
import {
|
||||
MyServersConfig,
|
||||
MyServersConfigMemory,
|
||||
MyServersConfigMemorySchema,
|
||||
MyServersConfigSchema,
|
||||
} from '@app/types/my-servers-config.js';
|
||||
|
||||
// Define ConfigType and ConfigObject
|
||||
export type ConfigType = 'flash' | 'memory';
|
||||
|
||||
/**
|
||||
* Get a writeable configuration based on the mode ('flash' or 'memory').
|
||||
*/
|
||||
export const getWriteableConfig = <T extends ConfigType>(
|
||||
config: T extends 'memory' ? MyServersConfigMemory : MyServersConfig,
|
||||
mode: T
|
||||
): T extends 'memory' ? MyServersConfigMemory : MyServersConfig => {
|
||||
const schema = mode === 'memory' ? MyServersConfigMemorySchema : MyServersConfigSchema;
|
||||
|
||||
const defaultConfig = schema.parse(initialState);
|
||||
// Use a type assertion for the mergedConfig to include `connectionStatus` only if `mode === 'memory`
|
||||
const mergedConfig = merge<
|
||||
MyServersConfig,
|
||||
T extends 'memory' ? MyServersConfigMemory : MyServersConfig
|
||||
>(defaultConfig, config);
|
||||
|
||||
if (mode === 'memory') {
|
||||
(mergedConfig as MyServersConfigMemory).remote.allowedOrigins = getAllowedOrigins().join(', ');
|
||||
(mergedConfig as MyServersConfigMemory).connectionStatus = {
|
||||
...(defaultConfig as MyServersConfigMemory).connectionStatus,
|
||||
...(config as MyServersConfigMemory).connectionStatus,
|
||||
};
|
||||
}
|
||||
|
||||
return schema.parse(mergedConfig) as T extends 'memory' ? MyServersConfigMemory : MyServersConfig; // Narrowing ensures correct typing
|
||||
};
|
||||
@@ -124,7 +124,15 @@ export const parseConfig = <T extends Record<string, any>>(
|
||||
throw new AppError('Invalid Parameters Passed to ParseConfig');
|
||||
}
|
||||
|
||||
const data: Record<string, any> = parseIni(fileContents);
|
||||
let data: Record<string, any>;
|
||||
try {
|
||||
data = parseIni(fileContents);
|
||||
} catch (error) {
|
||||
throw new AppError(
|
||||
`Failed to parse config file: ${error instanceof Error ? error.message : String(error)}`
|
||||
);
|
||||
}
|
||||
|
||||
// Remove quotes around keys
|
||||
const dataWithoutQuoteKeys = Object.fromEntries(
|
||||
Object.entries(data).map(([key, value]) => [key.replace(/^"(.+(?="$))"$/, '$1'), value])
|
||||
|
||||
@@ -1,25 +1,40 @@
|
||||
export const isUnraidApiRunning = async (): Promise<boolean | undefined> => {
|
||||
const { connect, describe, disconnect } = await import('pm2');
|
||||
return new Promise((resolve, reject) => {
|
||||
connect(function (err) {
|
||||
const { PM2_HOME } = await import('@app/environment.js');
|
||||
|
||||
// Set PM2_HOME if not already set
|
||||
if (!process.env.PM2_HOME) {
|
||||
process.env.PM2_HOME = PM2_HOME;
|
||||
}
|
||||
|
||||
const pm2Module = await import('pm2');
|
||||
const pm2 = pm2Module.default || pm2Module;
|
||||
|
||||
const pm2Promise = new Promise<boolean>((resolve) => {
|
||||
pm2.connect(function (err) {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
reject('Could not connect to pm2');
|
||||
// Don't reject here, resolve with false since we can't connect to PM2
|
||||
resolve(false);
|
||||
return;
|
||||
}
|
||||
|
||||
describe('unraid-api', function (err, processDescription) {
|
||||
console.log(err);
|
||||
// Now try to describe unraid-api specifically
|
||||
pm2.describe('unraid-api', function (err, processDescription) {
|
||||
if (err || processDescription.length === 0) {
|
||||
console.log(false); // Service not found or error occurred
|
||||
// Service not found or error occurred
|
||||
resolve(false);
|
||||
} else {
|
||||
const isOnline = processDescription?.[0]?.pm2_env?.status === 'online';
|
||||
console.log(isOnline); // Output true if online, false otherwise
|
||||
resolve(isOnline);
|
||||
}
|
||||
|
||||
disconnect();
|
||||
pm2.disconnect();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
const timeoutPromise = new Promise<boolean>((resolve) => {
|
||||
setTimeout(() => resolve(false), 10000); // 10 second timeout
|
||||
});
|
||||
|
||||
return Promise.race([pm2Promise, timeoutPromise]);
|
||||
};
|
||||
|
||||
437
api/src/core/utils/validation/validation-processor.test.ts
Normal file
437
api/src/core/utils/validation/validation-processor.test.ts
Normal file
@@ -0,0 +1,437 @@
|
||||
import { describe, expect, it } from 'vitest';
|
||||
|
||||
import type { ValidationResult } from '@app/core/utils/validation/validation-processor.js';
|
||||
import {
|
||||
createValidationProcessor,
|
||||
ResultInterpreters,
|
||||
} from '@app/core/utils/validation/validation-processor.js';
|
||||
|
||||
describe('ValidationProcessor', () => {
|
||||
type TestInput = { value: number; text: string };
|
||||
|
||||
it('should process all validation steps when no errors occur', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'positiveValue',
|
||||
validator: (input: TestInput) => input.value > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'nonEmptyText',
|
||||
validator: (input: TestInput) => input.text.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: 5, text: 'hello' }, { failFast: false });
|
||||
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.errors).toEqual({});
|
||||
});
|
||||
|
||||
it('should collect all errors when failFast is disabled', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'positiveValue',
|
||||
validator: (input: TestInput) => input.value > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'nonEmptyText',
|
||||
validator: (input: TestInput) => input.text.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: -1, text: '' }, { failFast: false });
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.positiveValue).toBe(false);
|
||||
expect(result.errors.nonEmptyText).toBe(false);
|
||||
});
|
||||
|
||||
it('should stop at first error when failFast is enabled', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'positiveValue',
|
||||
validator: (input: TestInput) => input.value > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'nonEmptyText',
|
||||
validator: (input: TestInput) => input.text.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: -1, text: '' }, { failFast: true });
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.positiveValue).toBe(false);
|
||||
expect(result.errors.nonEmptyText).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should always fail fast on steps marked with alwaysFailFast', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'criticalCheck',
|
||||
validator: (input: TestInput) => input.value !== 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
alwaysFailFast: true,
|
||||
},
|
||||
{
|
||||
name: 'nonEmptyText',
|
||||
validator: (input: TestInput) => input.text.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: 0, text: '' }, { failFast: false });
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.criticalCheck).toBe(false);
|
||||
expect(result.errors.nonEmptyText).toBeUndefined(); // Should not be executed
|
||||
});
|
||||
|
||||
it('should work with different result interpreters', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'arrayResult',
|
||||
validator: (input: TestInput) => [1, 2, 3],
|
||||
isError: ResultInterpreters.errorList,
|
||||
},
|
||||
{
|
||||
name: 'nullableResult',
|
||||
validator: (input: TestInput) => (input.value > 0 ? null : 'error'),
|
||||
isError: ResultInterpreters.nullableIsSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: -1, text: 'test' }, { failFast: false });
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.arrayResult).toEqual([1, 2, 3]);
|
||||
expect(result.errors.nullableResult).toBe('error');
|
||||
});
|
||||
|
||||
it('should handle 0-arity validators', () => {
|
||||
const processor = createValidationProcessor({
|
||||
steps: [
|
||||
{
|
||||
name: 'zeroArityValidator',
|
||||
validator: () => true,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'zeroArityValidator2',
|
||||
validator: () => false,
|
||||
isError: ResultInterpreters.booleanMeansFailure,
|
||||
},
|
||||
] as const,
|
||||
});
|
||||
|
||||
const result = processor(null);
|
||||
expect(result.isValid).toBe(true);
|
||||
});
|
||||
|
||||
it('should work with custom result interpreter', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'customCheck',
|
||||
validator: (input: TestInput) => ({ isOk: input.value > 0, code: 'VALUE_CHECK' }),
|
||||
isError: ResultInterpreters.custom((result: { isOk: boolean }) => !result.isOk),
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({ steps });
|
||||
|
||||
const validResult = processor({ value: 5, text: 'test' });
|
||||
expect(validResult.isValid).toBe(true);
|
||||
expect(validResult.errors).toEqual({});
|
||||
|
||||
const invalidResult = processor({ value: -1, text: 'test' });
|
||||
expect(invalidResult.isValid).toBe(false);
|
||||
expect(invalidResult.errors.customCheck).toEqual({ isOk: false, code: 'VALUE_CHECK' });
|
||||
});
|
||||
|
||||
it('should work with validationProcessor result interpreter', () => {
|
||||
const innerProcessor = createValidationProcessor({
|
||||
steps: [
|
||||
{
|
||||
name: 'innerCheck',
|
||||
validator: (val: number) => val > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const,
|
||||
});
|
||||
|
||||
const outerProcessor = createValidationProcessor({
|
||||
steps: [
|
||||
{
|
||||
name: 'nestedValidation',
|
||||
validator: (input: TestInput) => innerProcessor(input.value),
|
||||
isError: ResultInterpreters.validationProcessor,
|
||||
},
|
||||
] as const,
|
||||
});
|
||||
|
||||
const validResult = outerProcessor({ value: 5, text: 'test' });
|
||||
expect(validResult.isValid).toBe(true);
|
||||
|
||||
const invalidResult = outerProcessor({ value: -1, text: 'test' });
|
||||
expect(invalidResult.isValid).toBe(false);
|
||||
expect(invalidResult.errors.nestedValidation).toMatchObject({ isValid: false });
|
||||
});
|
||||
|
||||
it('should handle empty steps array', () => {
|
||||
const processor = createValidationProcessor<readonly []>({
|
||||
steps: [],
|
||||
});
|
||||
|
||||
const result = processor('any input' as never);
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.errors).toEqual({});
|
||||
});
|
||||
|
||||
it('should throw when validators throw errors', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'throwingValidator',
|
||||
validator: (input: TestInput) => {
|
||||
if (input.value === 0) {
|
||||
throw new Error('Division by zero');
|
||||
}
|
||||
return true;
|
||||
},
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({ steps });
|
||||
|
||||
expect(() => processor({ value: 0, text: 'test' })).toThrow('Division by zero');
|
||||
});
|
||||
|
||||
describe('complex validation scenarios', () => {
|
||||
it('should handle multi-type validation results', () => {
|
||||
type ComplexInput = {
|
||||
email: string;
|
||||
age: number;
|
||||
tags: string[];
|
||||
};
|
||||
|
||||
const steps = [
|
||||
{
|
||||
name: 'emailFormat',
|
||||
validator: (input: ComplexInput) =>
|
||||
/\S+@\S+\.\S+/.test(input.email) ? null : 'Invalid email format',
|
||||
isError: ResultInterpreters.nullableIsSuccess,
|
||||
},
|
||||
{
|
||||
name: 'ageRange',
|
||||
validator: (input: ComplexInput) => input.age >= 18 && input.age <= 120,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'tagValidation',
|
||||
validator: (input: ComplexInput) => {
|
||||
const invalidTags = input.tags.filter((tag) => tag.length < 2);
|
||||
return invalidTags;
|
||||
},
|
||||
isError: ResultInterpreters.errorList,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({ steps });
|
||||
|
||||
const validInput: ComplexInput = {
|
||||
email: 'user@example.com',
|
||||
age: 25,
|
||||
tags: ['valid', 'tags', 'here'],
|
||||
};
|
||||
const validResult = processor(validInput);
|
||||
expect(validResult.isValid).toBe(true);
|
||||
|
||||
const invalidInput: ComplexInput = {
|
||||
email: 'invalid-email',
|
||||
age: 150,
|
||||
tags: ['ok', 'a', 'b', 'valid'],
|
||||
};
|
||||
const invalidResult = processor(invalidInput, { failFast: false });
|
||||
expect(invalidResult.isValid).toBe(false);
|
||||
expect(invalidResult.errors.emailFormat).toBe('Invalid email format');
|
||||
expect(invalidResult.errors.ageRange).toBe(false);
|
||||
expect(invalidResult.errors.tagValidation).toEqual(['a', 'b']);
|
||||
});
|
||||
|
||||
it('should preserve type safety with heterogeneous result types', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'stringResult',
|
||||
validator: () => 'error message',
|
||||
isError: (result: string) => result.length > 0,
|
||||
},
|
||||
{
|
||||
name: 'numberResult',
|
||||
validator: () => 42,
|
||||
isError: (result: number) => result !== 0,
|
||||
},
|
||||
{
|
||||
name: 'objectResult',
|
||||
validator: () => ({ code: 'ERR_001', severity: 'high' }),
|
||||
isError: (result: { code: string; severity: string }) => true,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({ steps });
|
||||
const result = processor(null, { failFast: false });
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.stringResult).toBe('error message');
|
||||
expect(result.errors.numberResult).toBe(42);
|
||||
expect(result.errors.objectResult).toEqual({ code: 'ERR_001', severity: 'high' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle undefined vs null in nullable interpreter', () => {
|
||||
const steps = [
|
||||
{
|
||||
name: 'nullCheck',
|
||||
validator: () => null,
|
||||
isError: ResultInterpreters.nullableIsSuccess,
|
||||
},
|
||||
{
|
||||
name: 'undefinedCheck',
|
||||
validator: () => undefined,
|
||||
isError: ResultInterpreters.nullableIsSuccess,
|
||||
},
|
||||
{
|
||||
name: 'zeroCheck',
|
||||
validator: () => 0,
|
||||
isError: ResultInterpreters.nullableIsSuccess,
|
||||
},
|
||||
{
|
||||
name: 'falseCheck',
|
||||
validator: () => false,
|
||||
isError: ResultInterpreters.nullableIsSuccess,
|
||||
},
|
||||
] as const;
|
||||
|
||||
const processor = createValidationProcessor({ steps });
|
||||
const result = processor(null, { failFast: false });
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors.nullCheck).toBeUndefined();
|
||||
expect(result.errors.undefinedCheck).toBeUndefined();
|
||||
expect(result.errors.zeroCheck).toBe(0);
|
||||
expect(result.errors.falseCheck).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle very long validation chains', () => {
|
||||
// Test the real-world scenario of dynamically generated validation steps
|
||||
// Note: This demonstrates a limitation of the current type system -
|
||||
// dynamic step generation loses strict typing but still works at runtime
|
||||
type StepInput = { value: number };
|
||||
|
||||
const steps = Array.from({ length: 50 }, (_, i) => ({
|
||||
name: `step${i}`,
|
||||
validator: (input: StepInput) => input.value > i,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
}));
|
||||
|
||||
// For dynamic steps, we need to use a type assertion since TypeScript
|
||||
// can't infer the literal string union from Array.from()
|
||||
const processor = createValidationProcessor({
|
||||
steps,
|
||||
});
|
||||
|
||||
const result = processor({ value: 25 }, { failFast: false });
|
||||
expect(result.isValid).toBe(false);
|
||||
|
||||
const errorCount = Object.keys(result.errors).length;
|
||||
expect(errorCount).toBe(25);
|
||||
});
|
||||
|
||||
it('should handle validation by sum typing their inputs', () => {
|
||||
const processor = createValidationProcessor({
|
||||
steps: [
|
||||
{
|
||||
name: 'step1',
|
||||
validator: ({ age }: { age: number }) => age > 18,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'step2',
|
||||
validator: ({ name }: { name: string }) => name.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const result = processor({ age: 25, name: 'John' });
|
||||
expect(result.isValid).toBe(true);
|
||||
|
||||
const result2 = processor({ age: 15, name: '' });
|
||||
expect(result2.isValid).toBe(false);
|
||||
});
|
||||
|
||||
it('should allow wider types as processor inputs', () => {
|
||||
const sumProcessor = createValidationProcessor({
|
||||
steps: [
|
||||
{
|
||||
name: 'step1',
|
||||
validator: ({ age }: { age: number }) => age > 18,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'step2',
|
||||
validator: ({ name }: { name: string }) => name.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
],
|
||||
});
|
||||
type Person = { age: number; name: string };
|
||||
const groupProcessor = createValidationProcessor({
|
||||
steps: [
|
||||
{
|
||||
name: 'step1',
|
||||
validator: ({ age }: Person) => age > 18,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
{
|
||||
name: 'step2',
|
||||
validator: ({ name }: Person) => name.length > 0,
|
||||
isError: ResultInterpreters.booleanMeansSuccess,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const result = sumProcessor({ age: 25, name: 'John', favoriteColor: 'red' });
|
||||
expect(result.isValid).toBe(true);
|
||||
|
||||
const result2 = groupProcessor({ name: '', favoriteColor: 'red', age: 15 });
|
||||
expect(result2.isValid).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
230
api/src/core/utils/validation/validation-processor.ts
Normal file
230
api/src/core/utils/validation/validation-processor.ts
Normal file
@@ -0,0 +1,230 @@
|
||||
/**
|
||||
* @fileoverview Type-safe sequential validation processor
|
||||
*
|
||||
* This module provides a flexible validation system that allows you to chain multiple
|
||||
* validation steps together in a type-safe manner. It supports both fail-fast and
|
||||
* continue-on-error modes, with comprehensive error collection and reporting.
|
||||
*
|
||||
* Key features:
|
||||
* - Type-safe validation pipeline creation
|
||||
* - Sequential validation step execution
|
||||
* - Configurable fail-fast behavior (global or per-step)
|
||||
* - Comprehensive error collection with typed results
|
||||
* - Helper functions for common validation result interpretations
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const validator = createValidationProcessor({
|
||||
* steps: [
|
||||
* {
|
||||
* name: 'required',
|
||||
* validator: (input: string) => input.length > 0,
|
||||
* isError: ResultInterpreters.booleanMeansSuccess
|
||||
* },
|
||||
* {
|
||||
* name: 'email',
|
||||
* validator: (input: string) => /\S+@\S+\.\S+/.test(input),
|
||||
* isError: ResultInterpreters.booleanMeansSuccess
|
||||
* }
|
||||
* ]
|
||||
* });
|
||||
*
|
||||
* const result = validator('user@example.com');
|
||||
* if (!result.isValid) {
|
||||
* console.log('Validation errors:', result.errors);
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
|
||||
export type ValidationStepConfig<TInput, TResult, TName extends string = string> = {
|
||||
name: TName;
|
||||
validator: (input: TInput) => TResult;
|
||||
isError: (result: TResult) => boolean;
|
||||
alwaysFailFast?: boolean;
|
||||
};
|
||||
|
||||
export interface ValidationPipelineConfig {
|
||||
failFast?: boolean;
|
||||
}
|
||||
|
||||
export type ValidationPipelineDefinition<
|
||||
TInput,
|
||||
TSteps extends readonly ValidationStepConfig<TInput, any, string>[],
|
||||
> = {
|
||||
steps: TSteps;
|
||||
};
|
||||
|
||||
export type ExtractStepResults<TSteps extends readonly ValidationStepConfig<any, any, string>[]> = {
|
||||
[K in TSteps[number]['name']]: Extract<TSteps[number], { name: K }> extends ValidationStepConfig<
|
||||
any,
|
||||
infer R,
|
||||
K
|
||||
>
|
||||
? R
|
||||
: never;
|
||||
};
|
||||
|
||||
export type ValidationResult<TSteps extends readonly ValidationStepConfig<any, any, string>[]> = {
|
||||
isValid: boolean;
|
||||
errors: Partial<ExtractStepResults<TSteps>>;
|
||||
};
|
||||
|
||||
// Util: convert a union to an intersection
|
||||
type UnionToIntersection<U> = (U extends any ? (arg: U) => void : never) extends (arg: infer I) => void
|
||||
? I
|
||||
: never;
|
||||
|
||||
// Extract the *intersection* of all input types required by the steps. This guarantees that
|
||||
// the resulting processor knows about every property that any individual step relies on.
|
||||
// We purposely compute an intersection (not a union) so that all required fields are present.
|
||||
type ExtractInputType<TSteps extends readonly ValidationStepConfig<any, any, string>[]> =
|
||||
UnionToIntersection<
|
||||
TSteps[number] extends ValidationStepConfig<infer TInput, any, string> ? TInput : never
|
||||
>;
|
||||
|
||||
/**
|
||||
* Creates a type-safe validation processor that executes a series of validation steps
|
||||
* sequentially and collects errors from failed validations.
|
||||
*
|
||||
* This function returns a validation processor that can be called with input data
|
||||
* and an optional configuration object. The processor will run each validation step
|
||||
* in order, collecting any errors that occur.
|
||||
*
|
||||
* @template TSteps - A readonly array of validation step configurations that defines
|
||||
* the validation pipeline. The type is constrained to ensure type safety
|
||||
* across all steps and their results.
|
||||
*
|
||||
* @param definition - The validation pipeline definition
|
||||
* @param definition.steps - An array of validation step configurations. Each step must have:
|
||||
* - `name`: A unique string identifier for the step
|
||||
* - `validator`: A function that takes input and returns a validation result
|
||||
* - `isError`: A function that determines if the validation result represents an error
|
||||
* - `alwaysFailFast`: Optional flag to always stop execution on this step's failure
|
||||
*
|
||||
* @returns A validation processor function that accepts:
|
||||
* - `input`: The data to validate (type inferred from the first validation step)
|
||||
* - `config`: Optional configuration object with:
|
||||
* - `failFast`: If true, stops execution on first error (unless overridden by step config)
|
||||
*
|
||||
* @example Basic usage with string validation
|
||||
* ```typescript
|
||||
* const nameValidator = createValidationProcessor({
|
||||
* steps: [
|
||||
* {
|
||||
* name: 'required',
|
||||
* validator: (input: string) => input.trim().length > 0,
|
||||
* isError: ResultInterpreters.booleanMeansSuccess
|
||||
* },
|
||||
* {
|
||||
* name: 'minLength',
|
||||
* validator: (input: string) => input.length >= 2,
|
||||
* isError: ResultInterpreters.booleanMeansSuccess
|
||||
* },
|
||||
* {
|
||||
* name: 'maxLength',
|
||||
* validator: (input: string) => input.length <= 50,
|
||||
* isError: ResultInterpreters.booleanMeansSuccess
|
||||
* }
|
||||
* ]
|
||||
* });
|
||||
*
|
||||
* const result = nameValidator('John');
|
||||
* // result.isValid: boolean
|
||||
* // result.errors: { required?: boolean, minLength?: boolean, maxLength?: boolean }
|
||||
* ```
|
||||
*
|
||||
* @example Complex validation with custom error types
|
||||
* ```typescript
|
||||
* type ValidationError = { message: string; code: string };
|
||||
*
|
||||
* const userValidator = createValidationProcessor({
|
||||
* steps: [
|
||||
* {
|
||||
* name: 'email',
|
||||
* validator: (user: { email: string }) =>
|
||||
* /\S+@\S+\.\S+/.test(user.email)
|
||||
* ? null
|
||||
* : { message: 'Invalid email format', code: 'INVALID_EMAIL' },
|
||||
* isError: (result): result is ValidationError => result !== null
|
||||
* },
|
||||
* {
|
||||
* name: 'age',
|
||||
* validator: (user: { age: number }) =>
|
||||
* user.age >= 18
|
||||
* ? null
|
||||
* : { message: 'Must be 18 or older', code: 'UNDERAGE' },
|
||||
* isError: (result): result is ValidationError => result !== null,
|
||||
* alwaysFailFast: true // Stop immediately if age validation fails
|
||||
* }
|
||||
* ]
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* @example Using fail-fast mode
|
||||
* ```typescript
|
||||
* const result = validator(input, { failFast: true });
|
||||
* // Stops on first error, even if subsequent steps would also fail
|
||||
* ```
|
||||
*
|
||||
* @since 1.0.0
|
||||
*/
|
||||
export function createValidationProcessor<
|
||||
const TSteps extends readonly ValidationStepConfig<any, any, string>[],
|
||||
>(definition: { steps: TSteps }) {
|
||||
// Determine the base input type required by all steps (intersection).
|
||||
type BaseInput = ExtractInputType<TSteps>;
|
||||
|
||||
// Helper: widen input type for object literals while keeping regular objects assignable.
|
||||
type InputWithExtras = BaseInput extends object
|
||||
? BaseInput | (BaseInput & Record<string, unknown>)
|
||||
: BaseInput;
|
||||
|
||||
return function processValidation(
|
||||
input: InputWithExtras,
|
||||
config: ValidationPipelineConfig = {}
|
||||
): ValidationResult<TSteps> {
|
||||
const errors: Partial<ExtractStepResults<TSteps>> = {};
|
||||
let hasErrors = false;
|
||||
|
||||
for (const step of definition.steps) {
|
||||
const result = step.validator(input as BaseInput);
|
||||
const isError = step.isError(result);
|
||||
|
||||
if (isError) {
|
||||
hasErrors = true;
|
||||
(errors as any)[step.name] = result;
|
||||
|
||||
// Always fail fast for steps marked as such, or when global failFast is enabled
|
||||
if (step.alwaysFailFast || config.failFast) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
isValid: !hasErrors,
|
||||
errors,
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
/** Helper functions for common result interpretations */
|
||||
export const ResultInterpreters = {
|
||||
/** For boolean results: true = success, false = error */
|
||||
booleanMeansSuccess: (result: boolean): boolean => !result,
|
||||
|
||||
/** For boolean results: false = success, true = error */
|
||||
booleanMeansFailure: (result: boolean): boolean => result,
|
||||
|
||||
/** For nullable results: null/undefined = success, anything else = error */
|
||||
nullableIsSuccess: <T>(result: T | null | undefined): boolean => result != null,
|
||||
|
||||
/** For array results: empty = success, non-empty = error */
|
||||
errorList: <T>(result: T[]): boolean => result.length > 0,
|
||||
|
||||
/** For custom predicate */
|
||||
custom: <T>(predicate: (result: T) => boolean) => predicate,
|
||||
|
||||
/** Interpreting the result of a validation processor */
|
||||
validationProcessor: (result: { isValid: boolean }) => !result.isValid,
|
||||
} as const;
|
||||
@@ -9,6 +9,8 @@ const env =
|
||||
override: true,
|
||||
})
|
||||
: config({
|
||||
debug: false,
|
||||
quiet: true,
|
||||
path: '/usr/local/unraid-api/.env',
|
||||
encoding: 'utf-8',
|
||||
});
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
// Defines environment & configuration constants.
|
||||
// Non-function exports from this module are loaded into the NestJS Config at runtime.
|
||||
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { homedir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
@@ -64,6 +67,7 @@ export const getPackageJsonDependencies = (): string[] | undefined => {
|
||||
|
||||
export const API_VERSION = process.env.npm_package_version ?? getPackageJson().version;
|
||||
|
||||
/** Controls how the app is built/run (i.e. in terms of optimization) */
|
||||
export const NODE_ENV =
|
||||
(process.env.NODE_ENV as 'development' | 'test' | 'staging' | 'production') ?? 'production';
|
||||
export const environment = {
|
||||
@@ -73,6 +77,7 @@ export const CHOKIDAR_USEPOLLING = process.env.CHOKIDAR_USEPOLLING === 'true';
|
||||
export const IS_DOCKER = process.env.IS_DOCKER === 'true';
|
||||
export const DEBUG = process.env.DEBUG === 'true';
|
||||
export const INTROSPECTION = process.env.INTROSPECTION === 'true';
|
||||
/** Determines the app-level & business logic environment (i.e. what data & infrastructure is used) */
|
||||
export const ENVIRONMENT = process.env.ENVIRONMENT
|
||||
? (process.env.ENVIRONMENT as 'production' | 'staging' | 'development')
|
||||
: 'production';
|
||||
@@ -87,6 +92,7 @@ export const LOG_LEVEL = process.env.LOG_LEVEL
|
||||
: process.env.ENVIRONMENT === 'production'
|
||||
? 'INFO'
|
||||
: 'DEBUG';
|
||||
export const SUPPRESS_LOGS = process.env.SUPPRESS_LOGS === 'true';
|
||||
export const MOTHERSHIP_GRAPHQL_LINK = process.env.MOTHERSHIP_GRAPHQL_LINK
|
||||
? process.env.MOTHERSHIP_GRAPHQL_LINK
|
||||
: ENVIRONMENT === 'staging'
|
||||
@@ -94,4 +100,11 @@ export const MOTHERSHIP_GRAPHQL_LINK = process.env.MOTHERSHIP_GRAPHQL_LINK
|
||||
: 'https://mothership.unraid.net/ws';
|
||||
|
||||
export const PM2_HOME = process.env.PM2_HOME ?? join(homedir(), '.pm2');
|
||||
export const PATHS_CONFIG_MODULES = process.env.PATHS_CONFIG_MODULES!;
|
||||
export const PM2_PATH = join(import.meta.dirname, '../../', 'node_modules', 'pm2', 'bin', 'pm2');
|
||||
export const ECOSYSTEM_PATH = join(import.meta.dirname, '../../', 'ecosystem.config.json');
|
||||
export const PATHS_LOGS_DIR =
|
||||
process.env.PATHS_LOGS_DIR ?? process.env.LOGS_DIR ?? '/var/log/unraid-api';
|
||||
export const PATHS_LOGS_FILE = process.env.PATHS_LOGS_FILE ?? '/var/log/graphql-api.log';
|
||||
|
||||
export const PATHS_CONFIG_MODULES =
|
||||
process.env.PATHS_CONFIG_MODULES ?? '/boot/config/plugins/dynamix.my.servers/configs';
|
||||
|
||||
@@ -1,77 +0,0 @@
|
||||
import { ApolloClient, HttpLink, InMemoryCache, split } from '@apollo/client/core/index.js';
|
||||
import { onError } from '@apollo/client/link/error/index.js';
|
||||
import { GraphQLWsLink } from '@apollo/client/link/subscriptions/index.js';
|
||||
import { getMainDefinition } from '@apollo/client/utilities/index.js';
|
||||
import { fetch } from 'cross-fetch';
|
||||
import { createClient } from 'graphql-ws';
|
||||
import WebSocket from 'ws';
|
||||
|
||||
import { getInternalApiAddress } from '@app/consts.js';
|
||||
import { graphqlLogger } from '@app/core/log.js';
|
||||
import { getters } from '@app/store/index.js';
|
||||
|
||||
const getWebsocketWithHeaders = () => {
|
||||
return class WebsocketWithOriginHeader extends WebSocket {
|
||||
constructor(address, protocols) {
|
||||
super(address, protocols, {
|
||||
headers: {
|
||||
Origin: '/var/run/unraid-cli.sock',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
export const getApiApolloClient = ({ localApiKey }: { localApiKey: string }) => {
|
||||
const nginxPort = getters?.emhttp()?.nginx?.httpPort ?? 80;
|
||||
graphqlLogger.debug('Internal GraphQL URL: %s', getInternalApiAddress(true, nginxPort));
|
||||
const httpLink = new HttpLink({
|
||||
uri: getInternalApiAddress(true, nginxPort),
|
||||
fetch,
|
||||
headers: {
|
||||
Origin: '/var/run/unraid-cli.sock',
|
||||
'x-api-key': localApiKey,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
// Create the subscription websocket link
|
||||
const wsLink = new GraphQLWsLink(
|
||||
createClient({
|
||||
webSocketImpl: getWebsocketWithHeaders(),
|
||||
url: getInternalApiAddress(false, nginxPort),
|
||||
connectionParams: () => {
|
||||
return { 'x-api-key': localApiKey };
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
const splitLink = split(
|
||||
({ query }) => {
|
||||
const definition = getMainDefinition(query);
|
||||
return definition.kind === 'OperationDefinition' && definition.operation === 'subscription';
|
||||
},
|
||||
wsLink,
|
||||
httpLink
|
||||
);
|
||||
|
||||
const errorLink = onError(({ networkError }) => {
|
||||
if (networkError) {
|
||||
graphqlLogger.warn('[GRAPHQL-CLIENT] NETWORK ERROR ENCOUNTERED %o', networkError);
|
||||
}
|
||||
});
|
||||
|
||||
return new ApolloClient({
|
||||
defaultOptions: {
|
||||
query: {
|
||||
fetchPolicy: 'no-cache',
|
||||
},
|
||||
mutate: {
|
||||
fetchPolicy: 'no-cache',
|
||||
},
|
||||
},
|
||||
cache: new InMemoryCache(),
|
||||
link: errorLink.concat(splitLink),
|
||||
});
|
||||
};
|
||||
@@ -1,35 +0,0 @@
|
||||
export const GET_CLOUD_OBJECT = /* GraphQL */ `
|
||||
query getCloud {
|
||||
cloud {
|
||||
error
|
||||
apiKey {
|
||||
valid
|
||||
error
|
||||
}
|
||||
minigraphql {
|
||||
status
|
||||
timeout
|
||||
error
|
||||
}
|
||||
cloud {
|
||||
status
|
||||
error
|
||||
ip
|
||||
}
|
||||
allowedOrigins
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
export const GET_SERVERS = /* GraphQL */ `
|
||||
query getServers {
|
||||
servers {
|
||||
name
|
||||
guid
|
||||
status
|
||||
owner {
|
||||
username
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
@@ -1,58 +0,0 @@
|
||||
/* eslint-disable */
|
||||
import * as types from './graphql.js';
|
||||
import type { TypedDocumentNode as DocumentNode } from '@graphql-typed-document-node/core';
|
||||
|
||||
/**
|
||||
* Map of all GraphQL operations in the project.
|
||||
*
|
||||
* This map has several performance disadvantages:
|
||||
* 1. It is not tree-shakeable, so it will include all operations in the project.
|
||||
* 2. It is not minifiable, so the string of a GraphQL query will be multiple times inside the bundle.
|
||||
* 3. It does not support dead code elimination, so it will add unused operations.
|
||||
*
|
||||
* Therefore it is highly recommended to use the babel or swc plugin for production.
|
||||
* Learn more about it here: https://the-guild.dev/graphql/codegen/plugins/presets/preset-client#reducing-bundle-size
|
||||
*/
|
||||
type Documents = {
|
||||
"\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n": typeof types.SendRemoteGraphQlResponseDocument,
|
||||
"\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n": typeof types.RemoteGraphQlEventFragmentFragmentDoc,
|
||||
"\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n": typeof types.EventsDocument,
|
||||
};
|
||||
const documents: Documents = {
|
||||
"\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n": types.SendRemoteGraphQlResponseDocument,
|
||||
"\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n": types.RemoteGraphQlEventFragmentFragmentDoc,
|
||||
"\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n": types.EventsDocument,
|
||||
};
|
||||
|
||||
/**
|
||||
* The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const query = graphql(`query GetUser($id: ID!) { user(id: $id) { name } }`);
|
||||
* ```
|
||||
*
|
||||
* The query argument is unknown!
|
||||
* Please regenerate the types.
|
||||
*/
|
||||
export function graphql(source: string): unknown;
|
||||
|
||||
/**
|
||||
* The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
export function graphql(source: "\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n"): (typeof documents)["\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n"];
|
||||
/**
|
||||
* The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
export function graphql(source: "\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n"): (typeof documents)["\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n"];
|
||||
/**
|
||||
* The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
|
||||
*/
|
||||
export function graphql(source: "\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n"): (typeof documents)["\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n"];
|
||||
|
||||
export function graphql(source: string) {
|
||||
return (documents as any)[source] ?? {};
|
||||
}
|
||||
|
||||
export type DocumentType<TDocumentNode extends DocumentNode<any, any>> = TDocumentNode extends DocumentNode< infer TType, any> ? TType : never;
|
||||
@@ -1,748 +0,0 @@
|
||||
/* eslint-disable */
|
||||
import type { TypedDocumentNode as DocumentNode } from '@graphql-typed-document-node/core';
|
||||
export type Maybe<T> = T | null;
|
||||
export type InputMaybe<T> = Maybe<T>;
|
||||
export type Exact<T extends { [key: string]: unknown }> = { [K in keyof T]: T[K] };
|
||||
export type MakeOptional<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]?: Maybe<T[SubKey]> };
|
||||
export type MakeMaybe<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]: Maybe<T[SubKey]> };
|
||||
export type MakeEmpty<T extends { [key: string]: unknown }, K extends keyof T> = { [_ in K]?: never };
|
||||
export type Incremental<T> = T | { [P in keyof T]?: P extends ' $fragmentName' | '__typename' ? T[P] : never };
|
||||
/** All built-in and custom scalars, mapped to their actual values */
|
||||
export type Scalars = {
|
||||
ID: { input: string; output: string; }
|
||||
String: { input: string; output: string; }
|
||||
Boolean: { input: boolean; output: boolean; }
|
||||
Int: { input: number; output: number; }
|
||||
Float: { input: number; output: number; }
|
||||
/** A date-time string at UTC, such as 2007-12-03T10:15:30Z, compliant with the `date-time` format outlined in section 5.6 of the RFC 3339 profile of the ISO 8601 standard for representation of dates and times using the Gregorian calendar. */
|
||||
DateTime: { input: string; output: string; }
|
||||
/** A field whose value is a IPv4 address: https://en.wikipedia.org/wiki/IPv4. */
|
||||
IPv4: { input: any; output: any; }
|
||||
/** A field whose value is a IPv6 address: https://en.wikipedia.org/wiki/IPv6. */
|
||||
IPv6: { input: any; output: any; }
|
||||
/** The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
|
||||
JSON: { input: Record<string, any>; output: Record<string, any>; }
|
||||
/** The `Long` scalar type represents 52-bit integers */
|
||||
Long: { input: number; output: number; }
|
||||
/** A field whose value is a valid TCP port within the range of 0 to 65535: https://en.wikipedia.org/wiki/Transmission_Control_Protocol#TCP_ports */
|
||||
Port: { input: number; output: number; }
|
||||
/** A field whose value conforms to the standard URL format as specified in RFC3986: https://www.ietf.org/rfc/rfc3986.txt. */
|
||||
URL: { input: URL; output: URL; }
|
||||
};
|
||||
|
||||
export type AccessUrl = {
|
||||
__typename?: 'AccessUrl';
|
||||
ipv4?: Maybe<Scalars['URL']['output']>;
|
||||
ipv6?: Maybe<Scalars['URL']['output']>;
|
||||
name?: Maybe<Scalars['String']['output']>;
|
||||
type: UrlType;
|
||||
};
|
||||
|
||||
export type AccessUrlInput = {
|
||||
ipv4?: InputMaybe<Scalars['URL']['input']>;
|
||||
ipv6?: InputMaybe<Scalars['URL']['input']>;
|
||||
name?: InputMaybe<Scalars['String']['input']>;
|
||||
type: UrlType;
|
||||
};
|
||||
|
||||
export type ArrayCapacity = {
|
||||
__typename?: 'ArrayCapacity';
|
||||
bytes?: Maybe<ArrayCapacityBytes>;
|
||||
};
|
||||
|
||||
export type ArrayCapacityBytes = {
|
||||
__typename?: 'ArrayCapacityBytes';
|
||||
free?: Maybe<Scalars['Long']['output']>;
|
||||
total?: Maybe<Scalars['Long']['output']>;
|
||||
used?: Maybe<Scalars['Long']['output']>;
|
||||
};
|
||||
|
||||
export type ArrayCapacityBytesInput = {
|
||||
free?: InputMaybe<Scalars['Long']['input']>;
|
||||
total?: InputMaybe<Scalars['Long']['input']>;
|
||||
used?: InputMaybe<Scalars['Long']['input']>;
|
||||
};
|
||||
|
||||
export type ArrayCapacityInput = {
|
||||
bytes?: InputMaybe<ArrayCapacityBytesInput>;
|
||||
};
|
||||
|
||||
export type ClientConnectedEvent = {
|
||||
__typename?: 'ClientConnectedEvent';
|
||||
data: ClientConnectionEventData;
|
||||
type: EventType;
|
||||
};
|
||||
|
||||
export type ClientConnectionEventData = {
|
||||
__typename?: 'ClientConnectionEventData';
|
||||
apiKey: Scalars['String']['output'];
|
||||
type: ClientType;
|
||||
version: Scalars['String']['output'];
|
||||
};
|
||||
|
||||
export type ClientDisconnectedEvent = {
|
||||
__typename?: 'ClientDisconnectedEvent';
|
||||
data: ClientConnectionEventData;
|
||||
type: EventType;
|
||||
};
|
||||
|
||||
export type ClientPingEvent = {
|
||||
__typename?: 'ClientPingEvent';
|
||||
data: PingEventData;
|
||||
type: EventType;
|
||||
};
|
||||
|
||||
export enum ClientType {
|
||||
API = 'API',
|
||||
DASHBOARD = 'DASHBOARD'
|
||||
}
|
||||
|
||||
export type Config = {
|
||||
__typename?: 'Config';
|
||||
error?: Maybe<ConfigErrorState>;
|
||||
valid?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export enum ConfigErrorState {
|
||||
INVALID = 'INVALID',
|
||||
NO_KEY_SERVER = 'NO_KEY_SERVER',
|
||||
UNKNOWN_ERROR = 'UNKNOWN_ERROR',
|
||||
WITHDRAWN = 'WITHDRAWN'
|
||||
}
|
||||
|
||||
export type Dashboard = {
|
||||
__typename?: 'Dashboard';
|
||||
apps?: Maybe<DashboardApps>;
|
||||
array?: Maybe<DashboardArray>;
|
||||
config?: Maybe<DashboardConfig>;
|
||||
display?: Maybe<DashboardDisplay>;
|
||||
id: Scalars['ID']['output'];
|
||||
lastPublish?: Maybe<Scalars['DateTime']['output']>;
|
||||
network?: Maybe<Network>;
|
||||
online?: Maybe<Scalars['Boolean']['output']>;
|
||||
os?: Maybe<DashboardOs>;
|
||||
services?: Maybe<Array<Maybe<DashboardService>>>;
|
||||
twoFactor?: Maybe<DashboardTwoFactor>;
|
||||
vars?: Maybe<DashboardVars>;
|
||||
versions?: Maybe<DashboardVersions>;
|
||||
vms?: Maybe<DashboardVms>;
|
||||
};
|
||||
|
||||
export type DashboardApps = {
|
||||
__typename?: 'DashboardApps';
|
||||
installed?: Maybe<Scalars['Int']['output']>;
|
||||
started?: Maybe<Scalars['Int']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardAppsInput = {
|
||||
installed: Scalars['Int']['input'];
|
||||
started: Scalars['Int']['input'];
|
||||
};
|
||||
|
||||
export type DashboardArray = {
|
||||
__typename?: 'DashboardArray';
|
||||
/** Current array capacity */
|
||||
capacity?: Maybe<ArrayCapacity>;
|
||||
/** Current array state */
|
||||
state?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardArrayInput = {
|
||||
/** Current array capacity */
|
||||
capacity: ArrayCapacityInput;
|
||||
/** Current array state */
|
||||
state: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
export type DashboardCase = {
|
||||
__typename?: 'DashboardCase';
|
||||
base64?: Maybe<Scalars['String']['output']>;
|
||||
error?: Maybe<Scalars['String']['output']>;
|
||||
icon?: Maybe<Scalars['String']['output']>;
|
||||
url?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardCaseInput = {
|
||||
base64: Scalars['String']['input'];
|
||||
error?: InputMaybe<Scalars['String']['input']>;
|
||||
icon: Scalars['String']['input'];
|
||||
url: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
export type DashboardConfig = {
|
||||
__typename?: 'DashboardConfig';
|
||||
error?: Maybe<Scalars['String']['output']>;
|
||||
valid?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardConfigInput = {
|
||||
error?: InputMaybe<Scalars['String']['input']>;
|
||||
valid: Scalars['Boolean']['input'];
|
||||
};
|
||||
|
||||
export type DashboardDisplay = {
|
||||
__typename?: 'DashboardDisplay';
|
||||
case?: Maybe<DashboardCase>;
|
||||
};
|
||||
|
||||
export type DashboardDisplayInput = {
|
||||
case: DashboardCaseInput;
|
||||
};
|
||||
|
||||
export type DashboardInput = {
|
||||
apps: DashboardAppsInput;
|
||||
array: DashboardArrayInput;
|
||||
config: DashboardConfigInput;
|
||||
display: DashboardDisplayInput;
|
||||
os: DashboardOsInput;
|
||||
services: Array<DashboardServiceInput>;
|
||||
twoFactor?: InputMaybe<DashboardTwoFactorInput>;
|
||||
vars: DashboardVarsInput;
|
||||
versions: DashboardVersionsInput;
|
||||
vms: DashboardVmsInput;
|
||||
};
|
||||
|
||||
export type DashboardOs = {
|
||||
__typename?: 'DashboardOs';
|
||||
hostname?: Maybe<Scalars['String']['output']>;
|
||||
uptime?: Maybe<Scalars['DateTime']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardOsInput = {
|
||||
hostname: Scalars['String']['input'];
|
||||
uptime: Scalars['DateTime']['input'];
|
||||
};
|
||||
|
||||
export type DashboardService = {
|
||||
__typename?: 'DashboardService';
|
||||
name?: Maybe<Scalars['String']['output']>;
|
||||
online?: Maybe<Scalars['Boolean']['output']>;
|
||||
uptime?: Maybe<DashboardServiceUptime>;
|
||||
version?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardServiceInput = {
|
||||
name: Scalars['String']['input'];
|
||||
online: Scalars['Boolean']['input'];
|
||||
uptime?: InputMaybe<DashboardServiceUptimeInput>;
|
||||
version: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
export type DashboardServiceUptime = {
|
||||
__typename?: 'DashboardServiceUptime';
|
||||
timestamp?: Maybe<Scalars['DateTime']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardServiceUptimeInput = {
|
||||
timestamp: Scalars['DateTime']['input'];
|
||||
};
|
||||
|
||||
export type DashboardTwoFactor = {
|
||||
__typename?: 'DashboardTwoFactor';
|
||||
local?: Maybe<DashboardTwoFactorLocal>;
|
||||
remote?: Maybe<DashboardTwoFactorRemote>;
|
||||
};
|
||||
|
||||
export type DashboardTwoFactorInput = {
|
||||
local: DashboardTwoFactorLocalInput;
|
||||
remote: DashboardTwoFactorRemoteInput;
|
||||
};
|
||||
|
||||
export type DashboardTwoFactorLocal = {
|
||||
__typename?: 'DashboardTwoFactorLocal';
|
||||
enabled?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardTwoFactorLocalInput = {
|
||||
enabled: Scalars['Boolean']['input'];
|
||||
};
|
||||
|
||||
export type DashboardTwoFactorRemote = {
|
||||
__typename?: 'DashboardTwoFactorRemote';
|
||||
enabled?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardTwoFactorRemoteInput = {
|
||||
enabled: Scalars['Boolean']['input'];
|
||||
};
|
||||
|
||||
export type DashboardVars = {
|
||||
__typename?: 'DashboardVars';
|
||||
flashGuid?: Maybe<Scalars['String']['output']>;
|
||||
regState?: Maybe<Scalars['String']['output']>;
|
||||
regTy?: Maybe<Scalars['String']['output']>;
|
||||
serverDescription?: Maybe<Scalars['String']['output']>;
|
||||
serverName?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardVarsInput = {
|
||||
flashGuid: Scalars['String']['input'];
|
||||
regState: Scalars['String']['input'];
|
||||
regTy: Scalars['String']['input'];
|
||||
/** Server description */
|
||||
serverDescription?: InputMaybe<Scalars['String']['input']>;
|
||||
/** Name of the server */
|
||||
serverName?: InputMaybe<Scalars['String']['input']>;
|
||||
};
|
||||
|
||||
export type DashboardVersions = {
|
||||
__typename?: 'DashboardVersions';
|
||||
unraid?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardVersionsInput = {
|
||||
unraid: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
export type DashboardVms = {
|
||||
__typename?: 'DashboardVms';
|
||||
installed?: Maybe<Scalars['Int']['output']>;
|
||||
started?: Maybe<Scalars['Int']['output']>;
|
||||
};
|
||||
|
||||
export type DashboardVmsInput = {
|
||||
installed: Scalars['Int']['input'];
|
||||
started: Scalars['Int']['input'];
|
||||
};
|
||||
|
||||
export type Event = ClientConnectedEvent | ClientDisconnectedEvent | ClientPingEvent | RemoteAccessEvent | RemoteGraphQlEvent | UpdateEvent;
|
||||
|
||||
export enum EventType {
|
||||
CLIENT_CONNECTED_EVENT = 'CLIENT_CONNECTED_EVENT',
|
||||
CLIENT_DISCONNECTED_EVENT = 'CLIENT_DISCONNECTED_EVENT',
|
||||
CLIENT_PING_EVENT = 'CLIENT_PING_EVENT',
|
||||
REMOTE_ACCESS_EVENT = 'REMOTE_ACCESS_EVENT',
|
||||
REMOTE_GRAPHQL_EVENT = 'REMOTE_GRAPHQL_EVENT',
|
||||
UPDATE_EVENT = 'UPDATE_EVENT'
|
||||
}
|
||||
|
||||
export type FullServerDetails = {
|
||||
__typename?: 'FullServerDetails';
|
||||
apiConnectedCount?: Maybe<Scalars['Int']['output']>;
|
||||
apiVersion?: Maybe<Scalars['String']['output']>;
|
||||
connectionTimestamp?: Maybe<Scalars['String']['output']>;
|
||||
dashboard?: Maybe<Dashboard>;
|
||||
lastPublish?: Maybe<Scalars['String']['output']>;
|
||||
network?: Maybe<Network>;
|
||||
online?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export enum Importance {
|
||||
ALERT = 'ALERT',
|
||||
INFO = 'INFO',
|
||||
WARNING = 'WARNING'
|
||||
}
|
||||
|
||||
export type KsServerDetails = {
|
||||
__typename?: 'KsServerDetails';
|
||||
accessLabel: Scalars['String']['output'];
|
||||
accessUrl: Scalars['String']['output'];
|
||||
apiKey?: Maybe<Scalars['String']['output']>;
|
||||
description: Scalars['String']['output'];
|
||||
dnsHash: Scalars['String']['output'];
|
||||
flashBackupDate?: Maybe<Scalars['Int']['output']>;
|
||||
flashBackupUrl: Scalars['String']['output'];
|
||||
flashProduct: Scalars['String']['output'];
|
||||
flashVendor: Scalars['String']['output'];
|
||||
guid: Scalars['String']['output'];
|
||||
ipsId?: Maybe<Scalars['String']['output']>;
|
||||
keyType?: Maybe<Scalars['String']['output']>;
|
||||
licenseKey: Scalars['String']['output'];
|
||||
name: Scalars['String']['output'];
|
||||
plgVersion?: Maybe<Scalars['String']['output']>;
|
||||
signedIn: Scalars['Boolean']['output'];
|
||||
};
|
||||
|
||||
export type LegacyService = {
|
||||
__typename?: 'LegacyService';
|
||||
name?: Maybe<Scalars['String']['output']>;
|
||||
online?: Maybe<Scalars['Boolean']['output']>;
|
||||
uptime?: Maybe<Scalars['Int']['output']>;
|
||||
version?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type Mutation = {
|
||||
__typename?: 'Mutation';
|
||||
remoteGraphQLResponse: Scalars['Boolean']['output'];
|
||||
remoteMutation: Scalars['String']['output'];
|
||||
remoteSession?: Maybe<Scalars['Boolean']['output']>;
|
||||
sendNotification?: Maybe<Notification>;
|
||||
sendPing?: Maybe<Scalars['Boolean']['output']>;
|
||||
updateDashboard: Dashboard;
|
||||
updateNetwork: Network;
|
||||
};
|
||||
|
||||
|
||||
export type MutationRemoteGraphQlResponseArgs = {
|
||||
input: RemoteGraphQlServerInput;
|
||||
};
|
||||
|
||||
|
||||
export type MutationRemoteMutationArgs = {
|
||||
input: RemoteGraphQlClientInput;
|
||||
};
|
||||
|
||||
|
||||
export type MutationRemoteSessionArgs = {
|
||||
remoteAccess: RemoteAccessInput;
|
||||
};
|
||||
|
||||
|
||||
export type MutationSendNotificationArgs = {
|
||||
notification: NotificationInput;
|
||||
};
|
||||
|
||||
|
||||
export type MutationUpdateDashboardArgs = {
|
||||
data: DashboardInput;
|
||||
};
|
||||
|
||||
|
||||
export type MutationUpdateNetworkArgs = {
|
||||
data: NetworkInput;
|
||||
};
|
||||
|
||||
export type Network = {
|
||||
__typename?: 'Network';
|
||||
accessUrls?: Maybe<Array<AccessUrl>>;
|
||||
};
|
||||
|
||||
export type NetworkInput = {
|
||||
accessUrls: Array<AccessUrlInput>;
|
||||
};
|
||||
|
||||
export type Notification = {
|
||||
__typename?: 'Notification';
|
||||
description?: Maybe<Scalars['String']['output']>;
|
||||
importance?: Maybe<Importance>;
|
||||
link?: Maybe<Scalars['String']['output']>;
|
||||
status: NotificationStatus;
|
||||
subject?: Maybe<Scalars['String']['output']>;
|
||||
title?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type NotificationInput = {
|
||||
description?: InputMaybe<Scalars['String']['input']>;
|
||||
importance: Importance;
|
||||
link?: InputMaybe<Scalars['String']['input']>;
|
||||
subject?: InputMaybe<Scalars['String']['input']>;
|
||||
title?: InputMaybe<Scalars['String']['input']>;
|
||||
};
|
||||
|
||||
export enum NotificationStatus {
|
||||
FAILED_TO_SEND = 'FAILED_TO_SEND',
|
||||
NOT_FOUND = 'NOT_FOUND',
|
||||
PENDING = 'PENDING',
|
||||
SENT = 'SENT'
|
||||
}
|
||||
|
||||
export type PingEvent = {
|
||||
__typename?: 'PingEvent';
|
||||
data?: Maybe<Scalars['String']['output']>;
|
||||
type: EventType;
|
||||
};
|
||||
|
||||
export type PingEventData = {
|
||||
__typename?: 'PingEventData';
|
||||
source: PingEventSource;
|
||||
};
|
||||
|
||||
export enum PingEventSource {
|
||||
API = 'API',
|
||||
MOTHERSHIP = 'MOTHERSHIP'
|
||||
}
|
||||
|
||||
export type ProfileModel = {
|
||||
__typename?: 'ProfileModel';
|
||||
avatar?: Maybe<Scalars['String']['output']>;
|
||||
cognito_id?: Maybe<Scalars['String']['output']>;
|
||||
url?: Maybe<Scalars['String']['output']>;
|
||||
userId?: Maybe<Scalars['ID']['output']>;
|
||||
username?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type Query = {
|
||||
__typename?: 'Query';
|
||||
apiVersion?: Maybe<Scalars['String']['output']>;
|
||||
dashboard?: Maybe<Dashboard>;
|
||||
ksServers: Array<KsServerDetails>;
|
||||
online?: Maybe<Scalars['Boolean']['output']>;
|
||||
remoteQuery: Scalars['String']['output'];
|
||||
serverStatus: ServerStatusResponse;
|
||||
servers: Array<Maybe<Server>>;
|
||||
status?: Maybe<ServerStatus>;
|
||||
};
|
||||
|
||||
|
||||
export type QueryDashboardArgs = {
|
||||
id: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
|
||||
export type QueryRemoteQueryArgs = {
|
||||
input: RemoteGraphQlClientInput;
|
||||
};
|
||||
|
||||
|
||||
export type QueryServerStatusArgs = {
|
||||
apiKey: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
export enum RegistrationState {
|
||||
/** Basic */
|
||||
BASIC = 'BASIC',
|
||||
/** BLACKLISTED */
|
||||
EBLACKLISTED = 'EBLACKLISTED',
|
||||
/** BLACKLISTED */
|
||||
EBLACKLISTED1 = 'EBLACKLISTED1',
|
||||
/** BLACKLISTED */
|
||||
EBLACKLISTED2 = 'EBLACKLISTED2',
|
||||
/** Trial Expired */
|
||||
EEXPIRED = 'EEXPIRED',
|
||||
/** GUID Error */
|
||||
EGUID = 'EGUID',
|
||||
/** Multiple License Keys Present */
|
||||
EGUID1 = 'EGUID1',
|
||||
/** Trial Requires Internet Connection */
|
||||
ENOCONN = 'ENOCONN',
|
||||
/** No Flash */
|
||||
ENOFLASH = 'ENOFLASH',
|
||||
ENOFLASH1 = 'ENOFLASH1',
|
||||
ENOFLASH2 = 'ENOFLASH2',
|
||||
ENOFLASH3 = 'ENOFLASH3',
|
||||
ENOFLASH4 = 'ENOFLASH4',
|
||||
ENOFLASH5 = 'ENOFLASH5',
|
||||
ENOFLASH6 = 'ENOFLASH6',
|
||||
ENOFLASH7 = 'ENOFLASH7',
|
||||
/** No Keyfile */
|
||||
ENOKEYFILE = 'ENOKEYFILE',
|
||||
/** No Keyfile */
|
||||
ENOKEYFILE1 = 'ENOKEYFILE1',
|
||||
/** Missing key file */
|
||||
ENOKEYFILE2 = 'ENOKEYFILE2',
|
||||
/** Invalid installation */
|
||||
ETRIAL = 'ETRIAL',
|
||||
/** Plus */
|
||||
PLUS = 'PLUS',
|
||||
/** Pro */
|
||||
PRO = 'PRO',
|
||||
/** Trial */
|
||||
TRIAL = 'TRIAL'
|
||||
}
|
||||
|
||||
export type RemoteAccessEvent = {
|
||||
__typename?: 'RemoteAccessEvent';
|
||||
data: RemoteAccessEventData;
|
||||
type: EventType;
|
||||
};
|
||||
|
||||
/** Defines whether remote access event is the initiation (from connect) or the response (from the server) */
|
||||
export enum RemoteAccessEventActionType {
|
||||
ACK = 'ACK',
|
||||
END = 'END',
|
||||
INIT = 'INIT',
|
||||
PING = 'PING'
|
||||
}
|
||||
|
||||
export type RemoteAccessEventData = {
|
||||
__typename?: 'RemoteAccessEventData';
|
||||
apiKey: Scalars['String']['output'];
|
||||
type: RemoteAccessEventActionType;
|
||||
url?: Maybe<AccessUrl>;
|
||||
};
|
||||
|
||||
export type RemoteAccessInput = {
|
||||
apiKey: Scalars['String']['input'];
|
||||
type: RemoteAccessEventActionType;
|
||||
url?: InputMaybe<AccessUrlInput>;
|
||||
};
|
||||
|
||||
export type RemoteGraphQlClientInput = {
|
||||
apiKey: Scalars['String']['input'];
|
||||
body: Scalars['String']['input'];
|
||||
/** Time in milliseconds to wait for a response from the remote server (defaults to 15000) */
|
||||
timeout?: InputMaybe<Scalars['Int']['input']>;
|
||||
/** How long mothership should cache the result of this query in seconds, only valid on queries */
|
||||
ttl?: InputMaybe<Scalars['Int']['input']>;
|
||||
};
|
||||
|
||||
export type RemoteGraphQlEvent = {
|
||||
__typename?: 'RemoteGraphQLEvent';
|
||||
data: RemoteGraphQlEventData;
|
||||
type: EventType;
|
||||
};
|
||||
|
||||
export type RemoteGraphQlEventData = {
|
||||
__typename?: 'RemoteGraphQLEventData';
|
||||
/** Contains mutation / subscription / query data in the form of body: JSON, variables: JSON */
|
||||
body: Scalars['String']['output'];
|
||||
/** sha256 hash of the body */
|
||||
sha256: Scalars['String']['output'];
|
||||
type: RemoteGraphQlEventType;
|
||||
};
|
||||
|
||||
export enum RemoteGraphQlEventType {
|
||||
REMOTE_MUTATION_EVENT = 'REMOTE_MUTATION_EVENT',
|
||||
REMOTE_QUERY_EVENT = 'REMOTE_QUERY_EVENT',
|
||||
REMOTE_SUBSCRIPTION_EVENT = 'REMOTE_SUBSCRIPTION_EVENT',
|
||||
REMOTE_SUBSCRIPTION_EVENT_PING = 'REMOTE_SUBSCRIPTION_EVENT_PING'
|
||||
}
|
||||
|
||||
export type RemoteGraphQlServerInput = {
|
||||
/** Body - contains an object containing data: (GQL response data) or errors: (GQL Errors) */
|
||||
body: Scalars['String']['input'];
|
||||
/** sha256 hash of the body */
|
||||
sha256: Scalars['String']['input'];
|
||||
type: RemoteGraphQlEventType;
|
||||
};
|
||||
|
||||
export type Server = {
|
||||
__typename?: 'Server';
|
||||
apikey?: Maybe<Scalars['String']['output']>;
|
||||
guid?: Maybe<Scalars['String']['output']>;
|
||||
lanip?: Maybe<Scalars['String']['output']>;
|
||||
localurl?: Maybe<Scalars['String']['output']>;
|
||||
name?: Maybe<Scalars['String']['output']>;
|
||||
owner?: Maybe<ProfileModel>;
|
||||
remoteurl?: Maybe<Scalars['String']['output']>;
|
||||
status?: Maybe<ServerStatus>;
|
||||
wanip?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
/** Defines server fields that have a TTL on them, for example last ping */
|
||||
export type ServerFieldsWithTtl = {
|
||||
__typename?: 'ServerFieldsWithTtl';
|
||||
lastPing?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type ServerModel = {
|
||||
apikey: Scalars['String']['output'];
|
||||
guid: Scalars['String']['output'];
|
||||
lanip: Scalars['String']['output'];
|
||||
localurl: Scalars['String']['output'];
|
||||
name: Scalars['String']['output'];
|
||||
remoteurl: Scalars['String']['output'];
|
||||
wanip: Scalars['String']['output'];
|
||||
};
|
||||
|
||||
export enum ServerStatus {
|
||||
NEVER_CONNECTED = 'never_connected',
|
||||
OFFLINE = 'offline',
|
||||
ONLINE = 'online'
|
||||
}
|
||||
|
||||
export type ServerStatusResponse = {
|
||||
__typename?: 'ServerStatusResponse';
|
||||
id: Scalars['ID']['output'];
|
||||
lastPublish?: Maybe<Scalars['String']['output']>;
|
||||
online: Scalars['Boolean']['output'];
|
||||
};
|
||||
|
||||
export type Service = {
|
||||
__typename?: 'Service';
|
||||
name?: Maybe<Scalars['String']['output']>;
|
||||
online?: Maybe<Scalars['Boolean']['output']>;
|
||||
uptime?: Maybe<Uptime>;
|
||||
version?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type Subscription = {
|
||||
__typename?: 'Subscription';
|
||||
events?: Maybe<Array<Event>>;
|
||||
remoteSubscription: Scalars['String']['output'];
|
||||
servers: Array<Server>;
|
||||
};
|
||||
|
||||
|
||||
export type SubscriptionRemoteSubscriptionArgs = {
|
||||
input: RemoteGraphQlClientInput;
|
||||
};
|
||||
|
||||
export type TwoFactorLocal = {
|
||||
__typename?: 'TwoFactorLocal';
|
||||
enabled?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export type TwoFactorRemote = {
|
||||
__typename?: 'TwoFactorRemote';
|
||||
enabled?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export type TwoFactorWithToken = {
|
||||
__typename?: 'TwoFactorWithToken';
|
||||
local?: Maybe<TwoFactorLocal>;
|
||||
remote?: Maybe<TwoFactorRemote>;
|
||||
token?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type TwoFactorWithoutToken = {
|
||||
__typename?: 'TwoFactorWithoutToken';
|
||||
local?: Maybe<TwoFactorLocal>;
|
||||
remote?: Maybe<TwoFactorRemote>;
|
||||
};
|
||||
|
||||
export enum UrlType {
|
||||
DEFAULT = 'DEFAULT',
|
||||
LAN = 'LAN',
|
||||
MDNS = 'MDNS',
|
||||
WAN = 'WAN',
|
||||
WIREGUARD = 'WIREGUARD'
|
||||
}
|
||||
|
||||
export type UpdateEvent = {
|
||||
__typename?: 'UpdateEvent';
|
||||
data: UpdateEventData;
|
||||
type: EventType;
|
||||
};
|
||||
|
||||
export type UpdateEventData = {
|
||||
__typename?: 'UpdateEventData';
|
||||
apiKey: Scalars['String']['output'];
|
||||
type: UpdateType;
|
||||
};
|
||||
|
||||
export enum UpdateType {
|
||||
DASHBOARD = 'DASHBOARD',
|
||||
NETWORK = 'NETWORK'
|
||||
}
|
||||
|
||||
export type Uptime = {
|
||||
__typename?: 'Uptime';
|
||||
timestamp?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type UserProfileModelWithServers = {
|
||||
__typename?: 'UserProfileModelWithServers';
|
||||
profile: ProfileModel;
|
||||
servers: Array<Server>;
|
||||
};
|
||||
|
||||
export type Vars = {
|
||||
__typename?: 'Vars';
|
||||
expireTime?: Maybe<Scalars['DateTime']['output']>;
|
||||
flashGuid?: Maybe<Scalars['String']['output']>;
|
||||
regState?: Maybe<RegistrationState>;
|
||||
regTm2?: Maybe<Scalars['String']['output']>;
|
||||
regTy?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type SendRemoteGraphQlResponseMutationVariables = Exact<{
|
||||
input: RemoteGraphQlServerInput;
|
||||
}>;
|
||||
|
||||
|
||||
export type SendRemoteGraphQlResponseMutation = { __typename?: 'Mutation', remoteGraphQLResponse: boolean };
|
||||
|
||||
export type RemoteGraphQlEventFragmentFragment = { __typename?: 'RemoteGraphQLEvent', remoteGraphQLEventData: { __typename?: 'RemoteGraphQLEventData', type: RemoteGraphQlEventType, body: string, sha256: string } } & { ' $fragmentName'?: 'RemoteGraphQlEventFragmentFragment' };
|
||||
|
||||
export type EventsSubscriptionVariables = Exact<{ [key: string]: never; }>;
|
||||
|
||||
|
||||
export type EventsSubscription = { __typename?: 'Subscription', events?: Array<{ __typename: 'ClientConnectedEvent', connectedEvent: EventType, connectedData: { __typename?: 'ClientConnectionEventData', type: ClientType, version: string, apiKey: string } } | { __typename: 'ClientDisconnectedEvent', disconnectedEvent: EventType, disconnectedData: { __typename?: 'ClientConnectionEventData', type: ClientType, version: string, apiKey: string } } | { __typename: 'ClientPingEvent' } | { __typename: 'RemoteAccessEvent' } | (
|
||||
{ __typename: 'RemoteGraphQLEvent' }
|
||||
& { ' $fragmentRefs'?: { 'RemoteGraphQlEventFragmentFragment': RemoteGraphQlEventFragmentFragment } }
|
||||
) | { __typename: 'UpdateEvent' }> | null };
|
||||
|
||||
// Pre-parsed GraphQL AST for the RemoteGraphQLEventFragment fragment
// (selects the event's data as `remoteGraphQLEventData`). Generated code —
// regenerate via codegen rather than editing by hand.
export const RemoteGraphQlEventFragmentFragmentDoc = {"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RemoteGraphQLEventFragment"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RemoteGraphQLEvent"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","alias":{"kind":"Name","value":"remoteGraphQLEventData"},"name":{"kind":"Name","value":"data"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"body"}},{"kind":"Field","name":{"kind":"Name","value":"sha256"}}]}}]}}]} as unknown as DocumentNode<RemoteGraphQlEventFragmentFragment, unknown>;

// Pre-parsed AST for the `sendRemoteGraphQLResponse` mutation.
export const SendRemoteGraphQlResponseDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"sendRemoteGraphQLResponse"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"input"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"RemoteGraphQLServerInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"remoteGraphQLResponse"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"input"},"value":{"kind":"Variable","name":{"kind":"Name","value":"input"}}}]}]}}]} as unknown as DocumentNode<SendRemoteGraphQlResponseMutation, SendRemoteGraphQlResponseMutationVariables>;

// Pre-parsed AST for the `events` subscription, with the fragment inlined.
export const EventsDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"subscription","name":{"kind":"Name","value":"events"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"events"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"ClientConnectedEvent"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","alias":{"kind":"Name","value":"connectedData"},"name":{"kind":"Name","value":"data"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"version"}},{"kind":"Field","name":{"kind":"Name","value":"apiKey"}}]}},{"kind":"Field","alias":{"kind":"Name","value":"connectedEvent"},"name":{"kind":"Name","value":"type"}}]}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"ClientDisconnectedEvent"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","alias":{"kind":"Name","value":"disconnectedData"},"name":{"kind":"Name","value":"data"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"version"}},{"kind":"Field","name":{"kind":"Name","value":"apiKey"}}]}},{"kind":"Field","alias":{"kind":"Name","value":"disconnectedEvent"},"name":{"kind":"Name","value":"type"}}]}},{"kind":"FragmentSpread","name":{"kind":"Name","value":"RemoteGraphQLEventFragment"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RemoteGraphQLEventFragment"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RemoteGraphQLEvent"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","alias":{"kind":"Name","value":"remoteGraphQLEventData"},"name":{"kind":"Name","value":"data"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"body"}},{"kind":"Field","name":{"kind":"Name","value":"sha256"}}]}}]}}]} as unknown as DocumentNode<EventsSubscription, EventsSubscriptionVariables>;
|
||||
@@ -1,2 +0,0 @@
|
||||
export * from "./fragment-masking.js";
|
||||
export * from "./gql.js";
|
||||
@@ -1,216 +0,0 @@
|
||||
/* eslint-disable */
|
||||
import { z } from 'zod'
|
||||
import { AccessUrlInput, ArrayCapacityBytesInput, ArrayCapacityInput, ClientType, ConfigErrorState, DashboardAppsInput, DashboardArrayInput, DashboardCaseInput, DashboardConfigInput, DashboardDisplayInput, DashboardInput, DashboardOsInput, DashboardServiceInput, DashboardServiceUptimeInput, DashboardTwoFactorInput, DashboardTwoFactorLocalInput, DashboardTwoFactorRemoteInput, DashboardVarsInput, DashboardVersionsInput, DashboardVmsInput, EventType, Importance, NetworkInput, NotificationInput, NotificationStatus, PingEventSource, RegistrationState, RemoteAccessEventActionType, RemoteAccessInput, RemoteGraphQlClientInput, RemoteGraphQlEventType, RemoteGraphQlServerInput, ServerStatus, UrlType, UpdateType } from '@app/graphql/generated/client/graphql.js'
|
||||
|
||||
// Maps an object type to a zod shape whose per-key schemas produce exactly
// the original property types (generated by the graphql-codegen zod plugin).
type Properties<T> = Required<{
  [K in keyof T]: z.ZodType<T[K], any, T[K]>;
}>;

// Marker type for "any value that is defined and non-null".
type definedNonNullAny = {};

// Runtime guard matching the definedNonNullAny marker type.
export const isDefinedNonNullAny = (v: any): v is definedNonNullAny => v !== undefined && v !== null;

// Schema accepting anything except undefined/null.
export const definedNonNullAnySchema = z.any().refine((v) => isDefinedNonNullAny(v));

// Zod schemas mirroring the generated GraphQL enums.
export const ClientTypeSchema = z.nativeEnum(ClientType);

export const ConfigErrorStateSchema = z.nativeEnum(ConfigErrorState);

export const EventTypeSchema = z.nativeEnum(EventType);

export const ImportanceSchema = z.nativeEnum(Importance);

export const NotificationStatusSchema = z.nativeEnum(NotificationStatus);

export const PingEventSourceSchema = z.nativeEnum(PingEventSource);

export const RegistrationStateSchema = z.nativeEnum(RegistrationState);

export const RemoteAccessEventActionTypeSchema = z.nativeEnum(RemoteAccessEventActionType);

export const RemoteGraphQlEventTypeSchema = z.nativeEnum(RemoteGraphQlEventType);

export const ServerStatusSchema = z.nativeEnum(ServerStatus);

export const UrlTypeSchema = z.nativeEnum(UrlType);

export const UpdateTypeSchema = z.nativeEnum(UpdateType);
|
||||
|
||||
// Zod schemas for the generated GraphQL input object types. Each schema is a
// factory function (rather than a const) so that mutually/self-referential
// inputs can be expressed with z.lazy(). Generated code — regenerate via
// codegen rather than editing by hand.

/** Schema for AccessUrlInput. */
export function AccessUrlInputSchema(): z.ZodObject<Properties<AccessUrlInput>> {
  return z.object({
    ipv4: z.instanceof(URL).nullish(),
    ipv6: z.instanceof(URL).nullish(),
    name: z.string().nullish(),
    type: UrlTypeSchema
  })
}

/** Schema for ArrayCapacityBytesInput. */
export function ArrayCapacityBytesInputSchema(): z.ZodObject<Properties<ArrayCapacityBytesInput>> {
  return z.object({
    free: z.number().nullish(),
    total: z.number().nullish(),
    used: z.number().nullish()
  })
}

/** Schema for ArrayCapacityInput. */
export function ArrayCapacityInputSchema(): z.ZodObject<Properties<ArrayCapacityInput>> {
  return z.object({
    // z.lazy defers evaluation so the referenced factory need not be declared first.
    bytes: z.lazy(() => ArrayCapacityBytesInputSchema().nullish())
  })
}

/** Schema for DashboardAppsInput. */
export function DashboardAppsInputSchema(): z.ZodObject<Properties<DashboardAppsInput>> {
  return z.object({
    installed: z.number(),
    started: z.number()
  })
}

/** Schema for DashboardArrayInput. */
export function DashboardArrayInputSchema(): z.ZodObject<Properties<DashboardArrayInput>> {
  return z.object({
    capacity: z.lazy(() => ArrayCapacityInputSchema()),
    state: z.string()
  })
}

/** Schema for DashboardCaseInput. */
export function DashboardCaseInputSchema(): z.ZodObject<Properties<DashboardCaseInput>> {
  return z.object({
    base64: z.string(),
    error: z.string().nullish(),
    icon: z.string(),
    url: z.string()
  })
}

/** Schema for DashboardConfigInput. */
export function DashboardConfigInputSchema(): z.ZodObject<Properties<DashboardConfigInput>> {
  return z.object({
    error: z.string().nullish(),
    valid: z.boolean()
  })
}

/** Schema for DashboardDisplayInput. */
export function DashboardDisplayInputSchema(): z.ZodObject<Properties<DashboardDisplayInput>> {
  return z.object({
    case: z.lazy(() => DashboardCaseInputSchema())
  })
}

/** Schema for DashboardInput (the full dashboard payload). */
export function DashboardInputSchema(): z.ZodObject<Properties<DashboardInput>> {
  return z.object({
    apps: z.lazy(() => DashboardAppsInputSchema()),
    array: z.lazy(() => DashboardArrayInputSchema()),
    config: z.lazy(() => DashboardConfigInputSchema()),
    display: z.lazy(() => DashboardDisplayInputSchema()),
    os: z.lazy(() => DashboardOsInputSchema()),
    services: z.array(z.lazy(() => DashboardServiceInputSchema())),
    twoFactor: z.lazy(() => DashboardTwoFactorInputSchema().nullish()),
    vars: z.lazy(() => DashboardVarsInputSchema()),
    versions: z.lazy(() => DashboardVersionsInputSchema()),
    vms: z.lazy(() => DashboardVmsInputSchema())
  })
}

/** Schema for DashboardOsInput. */
export function DashboardOsInputSchema(): z.ZodObject<Properties<DashboardOsInput>> {
  return z.object({
    hostname: z.string(),
    uptime: z.string()
  })
}

/** Schema for DashboardServiceInput. */
export function DashboardServiceInputSchema(): z.ZodObject<Properties<DashboardServiceInput>> {
  return z.object({
    name: z.string(),
    online: z.boolean(),
    uptime: z.lazy(() => DashboardServiceUptimeInputSchema().nullish()),
    version: z.string()
  })
}

/** Schema for DashboardServiceUptimeInput. */
export function DashboardServiceUptimeInputSchema(): z.ZodObject<Properties<DashboardServiceUptimeInput>> {
  return z.object({
    timestamp: z.string()
  })
}

/** Schema for DashboardTwoFactorInput. */
export function DashboardTwoFactorInputSchema(): z.ZodObject<Properties<DashboardTwoFactorInput>> {
  return z.object({
    local: z.lazy(() => DashboardTwoFactorLocalInputSchema()),
    remote: z.lazy(() => DashboardTwoFactorRemoteInputSchema())
  })
}

/** Schema for DashboardTwoFactorLocalInput. */
export function DashboardTwoFactorLocalInputSchema(): z.ZodObject<Properties<DashboardTwoFactorLocalInput>> {
  return z.object({
    enabled: z.boolean()
  })
}

/** Schema for DashboardTwoFactorRemoteInput. */
export function DashboardTwoFactorRemoteInputSchema(): z.ZodObject<Properties<DashboardTwoFactorRemoteInput>> {
  return z.object({
    enabled: z.boolean()
  })
}

/** Schema for DashboardVarsInput. */
export function DashboardVarsInputSchema(): z.ZodObject<Properties<DashboardVarsInput>> {
  return z.object({
    flashGuid: z.string(),
    regState: z.string(),
    regTy: z.string(),
    serverDescription: z.string().nullish(),
    serverName: z.string().nullish()
  })
}

/** Schema for DashboardVersionsInput. */
export function DashboardVersionsInputSchema(): z.ZodObject<Properties<DashboardVersionsInput>> {
  return z.object({
    unraid: z.string()
  })
}

/** Schema for DashboardVmsInput. */
export function DashboardVmsInputSchema(): z.ZodObject<Properties<DashboardVmsInput>> {
  return z.object({
    installed: z.number(),
    started: z.number()
  })
}

/** Schema for NetworkInput. */
export function NetworkInputSchema(): z.ZodObject<Properties<NetworkInput>> {
  return z.object({
    accessUrls: z.array(z.lazy(() => AccessUrlInputSchema()))
  })
}

/** Schema for NotificationInput. */
export function NotificationInputSchema(): z.ZodObject<Properties<NotificationInput>> {
  return z.object({
    description: z.string().nullish(),
    importance: ImportanceSchema,
    link: z.string().nullish(),
    subject: z.string().nullish(),
    title: z.string().nullish()
  })
}

/** Schema for RemoteAccessInput. */
export function RemoteAccessInputSchema(): z.ZodObject<Properties<RemoteAccessInput>> {
  return z.object({
    apiKey: z.string(),
    type: RemoteAccessEventActionTypeSchema,
    url: z.lazy(() => AccessUrlInputSchema().nullish())
  })
}

/** Schema for RemoteGraphQlClientInput. */
export function RemoteGraphQlClientInputSchema(): z.ZodObject<Properties<RemoteGraphQlClientInput>> {
  return z.object({
    apiKey: z.string(),
    body: z.string(),
    timeout: z.number().nullish(),
    ttl: z.number().nullish()
  })
}

/** Schema for RemoteGraphQlServerInput. */
export function RemoteGraphQlServerInputSchema(): z.ZodObject<Properties<RemoteGraphQlServerInput>> {
  return z.object({
    body: z.string(),
    sha256: z.string(),
    type: RemoteGraphQlEventTypeSchema
  })
}
|
||||
@@ -1,10 +0,0 @@
|
||||
import { FatalAppError } from '@app/core/errors/fatal-error.js';
|
||||
import { modules } from '@app/core/index.js';
|
||||
|
||||
export const getCoreModule = (moduleName: string) => {
|
||||
if (!Object.keys(modules).includes(moduleName)) {
|
||||
throw new FatalAppError(`"${moduleName}" is not a valid core module.`);
|
||||
}
|
||||
|
||||
return modules[moduleName];
|
||||
};
|
||||
@@ -1,7 +0,0 @@
|
||||
import { logger } from '@app/core/log.js';
|
||||
import { type ApiKeyResponse } from '@app/unraid-api/graph/resolvers/cloud/cloud.model.js';
|
||||
|
||||
export const checkApi = async (): Promise<ApiKeyResponse> => {
|
||||
logger.trace('Cloud endpoint: Checking API');
|
||||
return { valid: true };
|
||||
};
|
||||
@@ -1,104 +0,0 @@
|
||||
import { got } from 'got';
|
||||
|
||||
import { FIVE_DAYS_SECS, ONE_DAY_SECS } from '@app/consts.js';
|
||||
import { logger } from '@app/core/log.js';
|
||||
import { API_VERSION, MOTHERSHIP_GRAPHQL_LINK } from '@app/environment.js';
|
||||
import { checkDNS } from '@app/graphql/resolvers/query/cloud/check-dns.js';
|
||||
import { checkMothershipAuthentication } from '@app/graphql/resolvers/query/cloud/check-mothership-authentication.js';
|
||||
import { getCloudCache, getDnsCache } from '@app/store/getters/index.js';
|
||||
import { getters, store } from '@app/store/index.js';
|
||||
import { setCloudCheck, setDNSCheck } from '@app/store/modules/cache.js';
|
||||
import { CloudResponse, MinigraphStatus } from '@app/unraid-api/graph/resolvers/cloud/cloud.model.js';
|
||||
|
||||
const mothershipBaseUrl = new URL(MOTHERSHIP_GRAPHQL_LINK).origin;
|
||||
|
||||
const createGotOptions = (apiVersion: string, apiKey: string) => ({
|
||||
timeout: {
|
||||
request: 5_000,
|
||||
},
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json',
|
||||
'x-unraid-api-version': apiVersion,
|
||||
'x-api-key': apiKey,
|
||||
},
|
||||
});
|
||||
|
||||
/**
|
||||
* This is mainly testing the user's network config
|
||||
* If they cannot resolve this they may have it blocked or have a routing issue
|
||||
*/
|
||||
const checkCanReachMothership = async (apiVersion: string, apiKey: string): Promise<void> => {
|
||||
const mothershipCanBeResolved = await got
|
||||
.head(mothershipBaseUrl, createGotOptions(apiVersion, apiKey))
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
if (!mothershipCanBeResolved) throw new Error(`Unable to connect to ${mothershipBaseUrl}`);
|
||||
};
|
||||
|
||||
/**
|
||||
* Run a more performant cloud check with permanent DNS checking
|
||||
*/
|
||||
const fastCloudCheck = async (): Promise<CloudResponse> => {
|
||||
const result = { status: 'ok', error: null, ip: 'FAST_CHECK_NO_IP_FOUND' };
|
||||
|
||||
const cloudIp = getDnsCache()?.cloudIp ?? null;
|
||||
if (cloudIp) {
|
||||
result.ip = cloudIp;
|
||||
} else {
|
||||
try {
|
||||
result.ip = (await checkDNS()).cloudIp;
|
||||
logger.debug('DNS_CHECK_RESULT', await checkDNS());
|
||||
store.dispatch(setDNSCheck({ cloudIp: result.ip, ttl: FIVE_DAYS_SECS, error: null }));
|
||||
} catch (error: unknown) {
|
||||
logger.warn('Failed to fetch DNS, but Minigraph is connected - continuing');
|
||||
result.ip = `ERROR: ${error instanceof Error ? error.message : 'Unknown Error'}`;
|
||||
// Don't set an error since we're actually connected to the cloud
|
||||
store.dispatch(setDNSCheck({ cloudIp: result.ip, ttl: ONE_DAY_SECS, error: null }));
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
 * Cloud endpoint: report connectivity between this server and mothership.
 *
 * Fast path: when minigraph is already connected, only DNS info is refreshed.
 * Slow path: requires an API key, then checks (in order) the cached result,
 * DNS resolution, raw reachability, and authentication. Successful slow-path
 * results are cached via setCloudCheck.
 *
 * @returns CloudResponse — `{ status: 'ok', error: null, ip }` on success,
 *          `{ status: 'error', error }` when any check throws an Error.
 */
export const checkCloud = async (): Promise<CloudResponse> => {
    logger.trace('Cloud endpoint: Checking mothership');

    try {
        const config = getters.config();
        const apiVersion = API_VERSION;
        const apiKey = config.remote.apikey;
        const graphqlStatus = getters.minigraph().status;
        const result = { status: 'ok', error: null, ip: 'NO_IP_FOUND' };

        // If minigraph is connected, skip the following cloud checks
        if (graphqlStatus === MinigraphStatus.CONNECTED) {
            return await fastCloudCheck();
        }

        // Check GraphQL connection state; if it's broken, run these checks
        if (!apiKey) throw new Error('API key is missing');

        const oldCheckResult = getCloudCache();
        if (oldCheckResult) {
            logger.trace('Using cached result for cloud check', oldCheckResult);
            return oldCheckResult;
        }

        // Check DNS
        result.ip = (await checkDNS()).cloudIp;
        // Check if we can reach mothership
        await checkCanReachMothership(apiVersion, apiKey);

        // Check auth, rate limiting, etc.
        await checkMothershipAuthentication(apiVersion, apiKey);

        // Cache for 10 minutes
        store.dispatch(setCloudCheck(result));

        return result;
    } catch (error: unknown) {
        // Non-Error throwables are unexpected — surface them instead of masking.
        if (!(error instanceof Error)) throw new Error(`Unknown Error "${error as string}"`);
        return { status: 'error', error: error.message };
    }
};
|
||||
@@ -1,70 +0,0 @@
|
||||
import { lookup as lookupDNS, resolve as resolveDNS } from 'dns';
|
||||
import { promisify } from 'util';
|
||||
|
||||
import ip from 'ip';
|
||||
|
||||
import { MOTHERSHIP_GRAPHQL_LINK } from '@app/environment.js';
|
||||
import { getDnsCache } from '@app/store/getters/index.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { setDNSCheck } from '@app/store/modules/cache.js';
|
||||
|
||||
const msHostname = new URL(MOTHERSHIP_GRAPHQL_LINK).host;
|
||||
|
||||
/**
|
||||
* Check if the local and network resolvers are able to see mothership
|
||||
*
|
||||
* See: https://nodejs.org/docs/latest/api/dns.html#dns_implementation_considerations
|
||||
*/
|
||||
export const checkDNS = async (hostname = msHostname): Promise<{ cloudIp: string }> => {
|
||||
const dnsCachedResuslt = getDnsCache();
|
||||
if (dnsCachedResuslt) {
|
||||
if (dnsCachedResuslt.cloudIp) {
|
||||
return { cloudIp: dnsCachedResuslt.cloudIp };
|
||||
}
|
||||
|
||||
if (dnsCachedResuslt.error) {
|
||||
throw dnsCachedResuslt.error;
|
||||
}
|
||||
}
|
||||
|
||||
let local: string | null = null;
|
||||
let network: string | null = null;
|
||||
try {
|
||||
// Check the local resolver like "ping" does
|
||||
// Check the DNS server the server has set - does a DNS query on the network
|
||||
const [localRes, networkRes] = await Promise.all([
|
||||
promisify(lookupDNS)(hostname).then(({ address }) => address),
|
||||
promisify(resolveDNS)(hostname).then(([address]) => address),
|
||||
]);
|
||||
local = localRes;
|
||||
network = networkRes;
|
||||
// The user's server and the DNS server they're using are returning different results
|
||||
if (!local.includes(network))
|
||||
throw new Error(
|
||||
`Local and network resolvers showing different IP for "${hostname}". [local="${
|
||||
local ?? 'NOT FOUND'
|
||||
}"] [network="${network ?? 'NOT FOUND'}"]`
|
||||
);
|
||||
|
||||
// The user likely has a PI-hole or something similar running.
|
||||
if (ip.isPrivate(local))
|
||||
throw new Error(
|
||||
`"${hostname}" is being resolved to a private IP. [IP=${local ?? 'NOT FOUND'}]`
|
||||
);
|
||||
} catch (error: unknown) {
|
||||
if (!(error instanceof Error)) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
store.dispatch(setDNSCheck({ cloudIp: null, error }));
|
||||
}
|
||||
|
||||
if (typeof local === 'string' || typeof network === 'string') {
|
||||
const validIp: string = local ?? network ?? '';
|
||||
store.dispatch(setDNSCheck({ cloudIp: validIp, error: null }));
|
||||
|
||||
return { cloudIp: validIp };
|
||||
}
|
||||
|
||||
return { cloudIp: '' };
|
||||
};
|
||||
@@ -1,13 +0,0 @@
|
||||
import { logger } from '@app/core/log.js';
|
||||
import { getters } from '@app/store/index.js';
|
||||
import { MinigraphqlResponse } from '@app/unraid-api/graph/resolvers/cloud/cloud.model.js';
|
||||
|
||||
export const checkMinigraphql = (): MinigraphqlResponse => {
|
||||
logger.trace('Cloud endpoint: Checking mini-graphql');
|
||||
// Do we have a connection to mothership?
|
||||
const { status, error, timeout, timeoutStart } = getters.minigraph();
|
||||
|
||||
const timeoutRemaining = timeout && timeoutStart ? timeout - (Date.now() - timeoutStart) : null;
|
||||
|
||||
return { status, error, timeout: timeoutRemaining };
|
||||
};
|
||||
@@ -1,61 +0,0 @@
|
||||
import { got, HTTPError, TimeoutError } from 'got';
|
||||
|
||||
import { logger } from '@app/core/log.js';
|
||||
import { MOTHERSHIP_GRAPHQL_LINK } from '@app/environment.js';
|
||||
|
||||
const createGotOptions = (apiVersion: string, apiKey: string) => ({
|
||||
timeout: {
|
||||
request: 5_000,
|
||||
},
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json',
|
||||
'x-unraid-api-version': apiVersion,
|
||||
'x-api-key': apiKey,
|
||||
},
|
||||
});
|
||||
|
||||
const isHttpError = (error: unknown): error is HTTPError => error instanceof HTTPError;
|
||||
|
||||
// Check if we're rate limited, etc.
|
||||
/**
 * Check whether mothership accepts this server's credentials.
 *
 * Sends a HEAD request to the mothership GraphQL endpoint (forced to https)
 * and translates failures into descriptive errors.
 *
 * @param apiVersion - Sent via the `x-unraid-api-version` header.
 * @param apiKey - Sent via the `x-api-key` header.
 * @throws Error on rate limiting (429, including retry-after if provided),
 *         invalid credentials (401), any other HTTP error, a timeout, or an
 *         unrecognised failure.
 */
export const checkMothershipAuthentication = async (apiVersion: string, apiKey: string) => {
    const msURL = new URL(MOTHERSHIP_GRAPHQL_LINK);
    // Rebuild the URL so the scheme is always https regardless of the configured link.
    const url = `https://${msURL.hostname}${msURL.pathname}`;

    try {
        const options = createGotOptions(apiVersion, apiKey);

        // This will throw if there is a non 2XX/3XX code
        await got.head(url, options);
    } catch (error: unknown) {
        // HTTP errors
        if (isHttpError(error)) {
            switch (error.response.statusCode) {
                case 429: {
                    const retryAfter = error.response.headers['retry-after'];
                    throw new Error(
                        retryAfter
                            ? `${url} is rate limited for another ${retryAfter} seconds`
                            : `${url} is rate limited`
                    );
                }

                case 401:
                    throw new Error('Invalid credentials');
                default:
                    throw new Error(
                        `Failed to connect to ${url} with a "${error.response.statusCode}" HTTP error.`
                    );
            }
        }

        // Timeout error
        if (error instanceof TimeoutError) throw new Error(`Timed-out while connecting to "${url}"`);

        // Unknown error
        logger.trace('Unknown Error', error);
        // @TODO: Add in the cause when we move to a newer node version
        // throw new Error('Unknown Error', { cause: error as Error });
        throw new Error('Unknown Error');
    }
};
|
||||
@@ -1,14 +0,0 @@
|
||||
export type Cloud = {
|
||||
error: string | null;
|
||||
apiKey: { valid: true; error: null } | { valid: false; error: string };
|
||||
minigraphql: {
|
||||
status: 'connected' | 'disconnected';
|
||||
};
|
||||
cloud: { status: 'ok'; error: null; ip: string } | { status: 'error'; error: string };
|
||||
allowedOrigins: string[];
|
||||
};
|
||||
|
||||
export const createResponse = (cloud: Omit<Cloud, 'error'>): Cloud => ({
|
||||
...cloud,
|
||||
error: cloud.apiKey.error ?? cloud.cloud.error,
|
||||
});
|
||||
@@ -1,423 +0,0 @@
|
||||
import { access } from 'fs/promises';
|
||||
|
||||
import toBytes from 'bytes';
|
||||
import { execa, execaCommandSync } from 'execa';
|
||||
import { isSymlink } from 'path-type';
|
||||
import { cpu, cpuFlags, mem, memLayout, osInfo, versions } from 'systeminformation';
|
||||
|
||||
import type { PciDevice } from '@app/core/types/index.js';
|
||||
import { bootTimestamp } from '@app/common/dashboard/boot-timestamp.js';
|
||||
import { getUnraidVersion } from '@app/common/dashboard/get-unraid-version.js';
|
||||
import { AppError } from '@app/core/errors/app-error.js';
|
||||
import { type DynamixConfig } from '@app/core/types/ini.js';
|
||||
import { toBoolean } from '@app/core/utils/casting.js';
|
||||
import { docker } from '@app/core/utils/clients/docker.js';
|
||||
import { cleanStdout } from '@app/core/utils/misc/clean-stdout.js';
|
||||
import { loadState } from '@app/core/utils/misc/load-state.js';
|
||||
import { sanitizeProduct } from '@app/core/utils/vms/domain/sanitize-product.js';
|
||||
import { sanitizeVendor } from '@app/core/utils/vms/domain/sanitize-vendor.js';
|
||||
import { vmRegExps } from '@app/core/utils/vms/domain/vm-regexps.js';
|
||||
import { filterDevices } from '@app/core/utils/vms/filter-devices.js';
|
||||
import { getPciDevices } from '@app/core/utils/vms/get-pci-devices.js';
|
||||
import { getters } from '@app/store/index.js';
|
||||
import { ThemeName } from '@app/unraid-api/graph/resolvers/customization/theme.model.js';
|
||||
import {
|
||||
Devices,
|
||||
Display,
|
||||
Gpu,
|
||||
InfoApps,
|
||||
InfoCpu,
|
||||
InfoMemory,
|
||||
Os as InfoOs,
|
||||
MemoryLayout,
|
||||
Temperature,
|
||||
Versions,
|
||||
} from '@app/unraid-api/graph/resolvers/info/info.model.js';
|
||||
|
||||
export const generateApps = async (): Promise<InfoApps> => {
|
||||
const installed = await docker
|
||||
.listContainers({ all: true })
|
||||
.catch(() => [])
|
||||
.then((containers) => containers.length);
|
||||
const started = await docker
|
||||
.listContainers()
|
||||
.catch(() => [])
|
||||
.then((containers) => containers.length);
|
||||
return { id: 'info/apps', installed, started };
|
||||
};
|
||||
|
||||
export const generateOs = async (): Promise<InfoOs> => {
|
||||
const os = await osInfo();
|
||||
|
||||
return {
|
||||
id: 'info/os',
|
||||
...os,
|
||||
hostname: getters.emhttp().var.name,
|
||||
uptime: bootTimestamp.toISOString(),
|
||||
};
|
||||
};
|
||||
|
||||
export const generateCpu = async (): Promise<InfoCpu> => {
|
||||
const { cores, physicalCores, speedMin, speedMax, stepping, ...rest } = await cpu();
|
||||
const flags = await cpuFlags()
|
||||
.then((flags) => flags.split(' '))
|
||||
.catch(() => []);
|
||||
|
||||
return {
|
||||
id: 'info/cpu',
|
||||
...rest,
|
||||
cores: physicalCores,
|
||||
threads: cores,
|
||||
flags,
|
||||
stepping: Number(stepping),
|
||||
// @TODO Find out what these should be if they're not defined
|
||||
speedmin: speedMin || -1,
|
||||
speedmax: speedMax || -1,
|
||||
};
|
||||
};
|
||||
|
||||
/**
 * Build the dashboard "display" settings from the dynamix config file(s).
 *
 * All configured dynamix config files are merged in order (later files win);
 * when no `display` section exists, a minimal object with only the id is
 * returned. String-valued config entries are coerced to booleans/numbers.
 */
export const generateDisplay = async (): Promise<Display> => {
    const filePaths = getters.paths()['dynamix-config'];

    // Merge every readable config file; unreadable files are skipped.
    const state = filePaths.reduce<Partial<DynamixConfig>>(
        (acc, filePath) => {
            const state = loadState<DynamixConfig>(filePath);
            return state ? { ...acc, ...state } : acc;
        },
        {
            id: 'dynamix-config/display',
        }
    );

    if (!state.display) {
        return {
            id: 'dynamix-config/display',
        };
    }
    const { theme, unit, ...display } = state.display;
    return {
        id: 'dynamix-config/display',
        ...display,
        // NOTE(review): raw config strings are cast straight to the enum
        // types — values outside ThemeName/Temperature pass through unchecked.
        theme: theme as ThemeName,
        unit: unit as Temperature,
        scale: toBoolean(display.scale),
        tabs: toBoolean(display.tabs),
        resize: toBoolean(display.resize),
        wwn: toBoolean(display.wwn),
        total: toBoolean(display.total),
        usage: toBoolean(display.usage),
        text: toBoolean(display.text),
        warning: Number.parseInt(display.warning, 10),
        critical: Number.parseInt(display.critical, 10),
        hot: Number.parseInt(display.hot, 10),
        max: Number.parseInt(display.max, 10),
        locale: display.locale || 'en_US',
    };
};
|
||||
|
||||
export const generateVersions = async (): Promise<Versions> => {
|
||||
const unraid = await getUnraidVersion();
|
||||
const softwareVersions = await versions();
|
||||
|
||||
return {
|
||||
id: 'info/versions',
|
||||
unraid,
|
||||
...softwareVersions,
|
||||
};
|
||||
};
|
||||
|
||||
/**
 * Build the dashboard memory info: per-DIMM layout, current usage, and the
 * board's maximum capacity parsed from `dmidecode -t memory` output
 * (falling back to the currently installed total when unavailable).
 */
export const generateMemory = async (): Promise<InfoMemory> => {
    const layout = await memLayout()
        .then((dims) => dims.map((dim) => dim as MemoryLayout))
        .catch(() => []);
    const info = await mem();
    // Default max capacity to the installed total in case dmidecode is
    // missing or its output cannot be parsed.
    let max = info.total;

    // Max memory
    try {
        const memoryInfo = await execa('dmidecode', ['-t', 'memory'])
            .then(cleanStdout)
            .catch((error: NodeJS.ErrnoException) => {
                if (error.code === 'ENOENT') {
                    throw new AppError('The dmidecode cli utility is missing.');
                }

                throw error;
            });
        const lines = memoryInfo.split('\n');
        const header = lines.find((line) => line.startsWith('Physical Memory Array'));
        if (header) {
            const start = lines.indexOf(header);
            // First "Handle" line at/after the header marks the section boundary.
            const nextHeaders = lines.slice(start, -1).find((line) => line.startsWith('Handle '));

            if (nextHeaders) {
                // NOTE(review): indexOf searches from the top of the output, so
                // if an identical "Handle" line exists before `start` this end
                // index could precede the header, yielding an empty slice —
                // verify against real dmidecode output.
                const end = lines.indexOf(nextHeaders);
                const fields = lines.slice(start, end);

                // Parse e.g. "Maximum Capacity: 32 GB" into bytes.
                max =
                    toBytes(
                        fields
                            ?.find((line) => line.trim().startsWith('Maximum Capacity'))
                            ?.trim()
                            ?.split(': ')[1] ?? '0'
                    ) ?? 0;
            }
        }
    } catch {
        // Ignore errors here
    }

    return {
        id: 'info/memory',
        layout,
        max,
        ...info,
    };
};
|
||||
|
||||
export const generateDevices = async (): Promise<Devices> => {
|
||||
/**
|
||||
* Set device class to device.
|
||||
* @param device The device to modify.
|
||||
* @returns The same device passed in but with the class modified.
|
||||
*/
|
||||
const addDeviceClass = (device: Readonly<PciDevice>): PciDevice => {
|
||||
const modifiedDevice: PciDevice = {
|
||||
...device,
|
||||
class: 'other',
|
||||
};
|
||||
|
||||
// GPU
|
||||
if (vmRegExps.allowedGpuClassId.test(device.typeid)) {
|
||||
modifiedDevice.class = 'vga';
|
||||
// Specialized product name cleanup for GPU
|
||||
// GF116 [GeForce GTX 550 Ti] --> GeForce GTX 550 Ti
|
||||
const regex = new RegExp(/.+\[(?<gpuName>.+)]/);
|
||||
const productName = regex.exec(device.productname)?.groups?.gpuName;
|
||||
|
||||
if (productName) {
|
||||
modifiedDevice.productname = productName;
|
||||
}
|
||||
|
||||
return modifiedDevice;
|
||||
// Audio
|
||||
}
|
||||
|
||||
if (vmRegExps.allowedAudioClassId.test(device.typeid)) {
|
||||
modifiedDevice.class = 'audio';
|
||||
|
||||
return modifiedDevice;
|
||||
}
|
||||
|
||||
return modifiedDevice;
|
||||
};
|
||||
|
||||
/**
|
||||
* System PCI devices.
|
||||
*/
|
||||
const systemPciDevices = async (): Promise<PciDevice[]> => {
|
||||
const devices = await getPciDevices();
|
||||
const basePath = '/sys/bus/pci/devices/0000:';
|
||||
|
||||
// Remove devices with no IOMMU support
|
||||
const filteredDevices = await Promise.all(
|
||||
devices.map(async (device: Readonly<PciDevice>) => {
|
||||
const exists = await access(`${basePath}${device.id}/iommu_group/`)
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
return exists ? device : null;
|
||||
})
|
||||
).then((devices) => devices.filter((device) => device !== null));
|
||||
|
||||
/**
|
||||
* Run device cleanup
|
||||
*
|
||||
* Tasks:
|
||||
* - Mark disallowed devices
|
||||
* - Add class
|
||||
* - Add whether kernel-bound driver exists
|
||||
* - Cleanup device vendor/product names
|
||||
*/
|
||||
const processedDevices = await filterDevices(filteredDevices).then(async (devices) =>
|
||||
Promise.all(
|
||||
devices
|
||||
.map((device) => addDeviceClass(device as PciDevice))
|
||||
.map(async (device) => {
|
||||
// Attempt to get the current kernel-bound driver for this pci device
|
||||
await isSymlink(`${basePath}${device.id}/driver`).then((symlink) => {
|
||||
if (symlink) {
|
||||
// $strLink = @readlink('/sys/bus/pci/devices/0000:'.$arrMatch['id']. '/driver');
|
||||
// if (!empty($strLink)) {
|
||||
// $strDriver = basename($strLink);
|
||||
// }
|
||||
}
|
||||
});
|
||||
|
||||
// Clean up the vendor and product name
|
||||
device.vendorname = sanitizeVendor(device.vendorname);
|
||||
device.productname = sanitizeProduct(device.productname);
|
||||
|
||||
return device;
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
return processedDevices;
|
||||
};
|
||||
|
||||
/**
|
||||
* System GPU Devices
|
||||
*
|
||||
* @name systemGPUDevices
|
||||
* @ignore
|
||||
* @private
|
||||
*/
|
||||
const systemGPUDevices: Promise<Gpu[]> = systemPciDevices()
|
||||
.then((devices) => {
|
||||
return devices
|
||||
.filter((device) => device.class === 'vga' && !device.allowed)
|
||||
.map((entry) => {
|
||||
const gpu: Gpu = {
|
||||
blacklisted: entry.allowed,
|
||||
class: entry.class,
|
||||
id: entry.id,
|
||||
productid: entry.product,
|
||||
typeid: entry.typeid,
|
||||
type: entry.manufacturer,
|
||||
vendorname: entry.vendorname,
|
||||
};
|
||||
return gpu;
|
||||
});
|
||||
})
|
||||
.catch(() => []);
|
||||
|
||||
/**
|
||||
* System usb devices.
|
||||
* @returns Array of USB devices.
|
||||
*/
|
||||
const getSystemUSBDevices = async () => {
|
||||
try {
|
||||
// Get a list of all usb hubs so we can filter the allowed/disallowed
|
||||
const usbHubs = await execa('cat /sys/bus/usb/drivers/hub/*/modalias', { shell: true })
|
||||
.then(({ stdout }) =>
|
||||
stdout.split('\n').map((line) => {
|
||||
const [, id] = line.match(/usb:v(\w{9})/) ?? [];
|
||||
return id.replace('p', ':');
|
||||
})
|
||||
)
|
||||
.catch(() => [] as string[]);
|
||||
|
||||
const emhttp = getters.emhttp();
|
||||
|
||||
// Remove boot drive
|
||||
const filterBootDrive = (device: Readonly<PciDevice>): boolean =>
|
||||
emhttp.var.flashGuid !== device.guid;
|
||||
|
||||
// Remove usb hubs
|
||||
const filterUsbHubs = (device: Readonly<PciDevice>): boolean => !usbHubs.includes(device.id);
|
||||
|
||||
// Clean up the name
|
||||
const sanitizeVendorName = (device: Readonly<PciDevice>) => {
|
||||
const vendorname = sanitizeVendor(device.vendorname || '');
|
||||
return {
|
||||
...device,
|
||||
vendorname,
|
||||
};
|
||||
};
|
||||
|
||||
const parseDeviceLine = (line: Readonly<string>): { value: string; string: string } => {
|
||||
const emptyLine = { value: '', string: '' };
|
||||
|
||||
// If the line is blank return nothing
|
||||
if (!line) {
|
||||
return emptyLine;
|
||||
}
|
||||
|
||||
// Parse the line
|
||||
const [, _] = line.split(/[ \t]{2,}/).filter(Boolean);
|
||||
|
||||
const match = _.match(/^(\S+)\s(.*)/)?.slice(1);
|
||||
|
||||
// If there's no match return nothing
|
||||
if (!match) {
|
||||
return emptyLine;
|
||||
}
|
||||
|
||||
return {
|
||||
value: match[0],
|
||||
string: match[1],
|
||||
};
|
||||
};
|
||||
|
||||
// Add extra fields to device
|
||||
const parseDevice = (device: Readonly<PciDevice>) => {
|
||||
const modifiedDevice: PciDevice = {
|
||||
...device,
|
||||
};
|
||||
const info = execaCommandSync(`lsusb -d ${device.id} -v`).stdout.split('\n');
|
||||
const deviceName = device.name.trim();
|
||||
const iSerial = parseDeviceLine(info.filter((line) => line.includes('iSerial'))[0]);
|
||||
const iProduct = parseDeviceLine(info.filter((line) => line.includes('iProduct'))[0]);
|
||||
const iManufacturer = parseDeviceLine(
|
||||
info.filter((line) => line.includes('iManufacturer'))[0]
|
||||
);
|
||||
const idProduct = parseDeviceLine(info.filter((line) => line.includes('idProduct'))[0]);
|
||||
const idVendor = parseDeviceLine(info.filter((line) => line.includes('idVendor'))[0]);
|
||||
const serial = `${iSerial.string.slice(8).slice(0, 4)}-${iSerial.string
|
||||
.slice(8)
|
||||
.slice(4)}`;
|
||||
const guid = `${idVendor.value.slice(2)}-${idProduct.value.slice(2)}-${serial}`;
|
||||
|
||||
modifiedDevice.serial = iSerial.string;
|
||||
modifiedDevice.product = iProduct.string;
|
||||
modifiedDevice.manufacturer = iManufacturer.string;
|
||||
modifiedDevice.guid = guid;
|
||||
|
||||
// Set name if missing
|
||||
if (deviceName === '') {
|
||||
modifiedDevice.name = `${iProduct.string} ${iManufacturer.string}`.trim();
|
||||
}
|
||||
|
||||
// Name still blank? Replace using fallback default
|
||||
if (deviceName === '') {
|
||||
modifiedDevice.name = '[unnamed device]';
|
||||
}
|
||||
|
||||
// Ensure name is trimmed
|
||||
modifiedDevice.name = device.name.trim();
|
||||
|
||||
return modifiedDevice;
|
||||
};
|
||||
|
||||
const parseUsbDevices = (stdout: string) =>
|
||||
stdout.split('\n').map((line) => {
|
||||
const regex = new RegExp(/^.+: ID (?<id>\S+)(?<name>.*)$/);
|
||||
const result = regex.exec(line);
|
||||
return result?.groups as unknown as PciDevice;
|
||||
}) ?? [];
|
||||
|
||||
// Get all usb devices
|
||||
const usbDevices = await execa('lsusb')
|
||||
.then(async ({ stdout }) =>
|
||||
parseUsbDevices(stdout)
|
||||
.map(parseDevice)
|
||||
.filter(filterBootDrive)
|
||||
.filter(filterUsbHubs)
|
||||
.map(sanitizeVendorName)
|
||||
)
|
||||
.catch(() => []);
|
||||
|
||||
return usbDevices;
|
||||
} catch (error: unknown) {
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
id: 'info/devices',
|
||||
// Scsi: await scsiDevices,
|
||||
gpu: await systemGPUDevices,
|
||||
pci: await systemPciDevices(),
|
||||
usb: await getSystemUSBDevices(),
|
||||
};
|
||||
};
|
||||
@@ -1,233 +0,0 @@
|
||||
import type { RootState } from '@app/store/index.js';
|
||||
import { logger } from '@app/core/log.js';
|
||||
import { type Nginx } from '@app/core/types/states/nginx.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
import { AccessUrl, URL_TYPE } from '@app/unraid-api/graph/resolvers/connect/connect.model.js';
|
||||
|
||||
// Shared input shape for building an access URL from a host plus port(s).
interface UrlForFieldInput {
    // Hostname or IP (no scheme); must be non-empty
    url: string;
    // Plain HTTP port; when set, getUrlForField emits an http:// URL
    port?: number;
    // HTTPS port; when set (and no port), getUrlForField emits an https:// URL
    portSsl?: number;
}

// Variant guaranteeing an SSL port is present (produces https:// URLs).
interface UrlForFieldInputSecure extends UrlForFieldInput {
    url: string;
    portSsl: number;
}
// Variant guaranteeing a plain HTTP port is present (produces http:// URLs).
interface UrlForFieldInputInsecure extends UrlForFieldInput {
    url: string;
    port: number;
}
|
||||
|
||||
export const getUrlForField = ({
|
||||
url,
|
||||
port,
|
||||
portSsl,
|
||||
}: UrlForFieldInputInsecure | UrlForFieldInputSecure) => {
|
||||
let portToUse = '';
|
||||
let httpMode = 'https://';
|
||||
|
||||
if (!url || url === '') {
|
||||
throw new Error('No URL Provided');
|
||||
}
|
||||
|
||||
if (port) {
|
||||
portToUse = port === 80 ? '' : `:${port}`;
|
||||
httpMode = 'http://';
|
||||
} else if (portSsl) {
|
||||
portToUse = portSsl === 443 ? '' : `:${portSsl}`;
|
||||
httpMode = 'https://';
|
||||
} else {
|
||||
throw new Error(`No ports specified for URL: ${url}`);
|
||||
}
|
||||
|
||||
const urlString = `${httpMode}${url}${portToUse}`;
|
||||
|
||||
try {
|
||||
return new URL(urlString);
|
||||
} catch (error: unknown) {
|
||||
throw new Error(`Failed to parse URL: ${urlString}`);
|
||||
}
|
||||
};
|
||||
|
||||
// True when the nginx field name refers to an FQDN entry (e.g. lanFqdn, wanFqdn).
const fieldIsFqdn = (field: keyof Nginx) => {
    return field?.toLowerCase().includes('fqdn');
};
|
||||
|
||||
// Subset of Nginx state fields that can be resolved into a server access URL
// by getUrlForServer (LAN addresses/names plus LAN/WAN FQDN entries).
export type NginxUrlFields = Extract<
    keyof Nginx,
    'lanIp' | 'lanIp6' | 'lanName' | 'lanMdns' | 'lanFqdn' | 'wanFqdn' | 'wanFqdn6'
>;
|
||||
|
||||
/**
|
||||
*
|
||||
* @param nginx Nginx Config File
|
||||
* @param field The field to build the URL from
|
||||
* @returns a URL, created from the combination of inputs
|
||||
* @throws Error when the URL cannot be created or the URL is invalid
|
||||
*/
|
||||
export const getUrlForServer = ({ nginx, field }: { nginx: Nginx; field: NginxUrlFields }): URL => {
|
||||
if (nginx[field]) {
|
||||
if (fieldIsFqdn(field)) {
|
||||
return getUrlForField({
|
||||
url: nginx[field],
|
||||
portSsl: nginx.httpsPort,
|
||||
});
|
||||
}
|
||||
|
||||
if (!nginx.sslEnabled) {
|
||||
// Use SSL = no
|
||||
return getUrlForField({ url: nginx[field], port: nginx.httpPort });
|
||||
}
|
||||
|
||||
if (nginx.sslMode === 'yes') {
|
||||
return getUrlForField({
|
||||
url: nginx[field],
|
||||
portSsl: nginx.httpsPort,
|
||||
});
|
||||
}
|
||||
|
||||
if (nginx.sslMode === 'auto') {
|
||||
throw new Error(`Cannot get IP Based URL for field: "${field}" SSL mode auto`);
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
`IP URL Resolver: Could not resolve any access URL for field: "${field}", is FQDN?: ${fieldIsFqdn(
|
||||
field
|
||||
)}`
|
||||
);
|
||||
};
|
||||
|
||||
const getUrlTypeFromFqdn = (fqdnType: string): URL_TYPE => {
|
||||
switch (fqdnType) {
|
||||
case 'LAN':
|
||||
return URL_TYPE.LAN;
|
||||
case 'WAN':
|
||||
return URL_TYPE.WAN;
|
||||
case 'WG':
|
||||
return URL_TYPE.WIREGUARD;
|
||||
default:
|
||||
// HACK: This should be added as a new type (e.g. OTHER or CUSTOM)
|
||||
return URL_TYPE.WIREGUARD;
|
||||
}
|
||||
};
|
||||
|
||||
export const getServerIps = (
|
||||
state: RootState = store.getState()
|
||||
): { urls: AccessUrl[]; errors: Error[] } => {
|
||||
const { nginx } = state.emhttp;
|
||||
const {
|
||||
remote: { wanport },
|
||||
} = state.config;
|
||||
if (!nginx || Object.keys(nginx).length === 0) {
|
||||
return { urls: [], errors: [new Error('Nginx Not Loaded')] };
|
||||
}
|
||||
|
||||
const errors: Error[] = [];
|
||||
const urls: AccessUrl[] = [];
|
||||
|
||||
try {
|
||||
// Default URL
|
||||
const defaultUrl = new URL(nginx.defaultUrl);
|
||||
urls.push({
|
||||
name: 'Default',
|
||||
type: URL_TYPE.DEFAULT,
|
||||
ipv4: defaultUrl,
|
||||
ipv6: defaultUrl,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// Lan IP URL
|
||||
const lanIp4Url = getUrlForServer({ nginx, field: 'lanIp' });
|
||||
urls.push({
|
||||
name: 'LAN IPv4',
|
||||
type: URL_TYPE.LAN,
|
||||
ipv4: lanIp4Url,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// Lan IP6 URL
|
||||
const lanIp6Url = getUrlForServer({ nginx, field: 'lanIp6' });
|
||||
urls.push({
|
||||
name: 'LAN IPv6',
|
||||
type: URL_TYPE.LAN,
|
||||
ipv4: lanIp6Url,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// Lan Name URL
|
||||
const lanNameUrl = getUrlForServer({ nginx, field: 'lanName' });
|
||||
urls.push({
|
||||
name: 'LAN Name',
|
||||
type: URL_TYPE.MDNS,
|
||||
ipv4: lanNameUrl,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// Lan MDNS URL
|
||||
const lanMdnsUrl = getUrlForServer({ nginx, field: 'lanMdns' });
|
||||
urls.push({
|
||||
name: 'LAN MDNS',
|
||||
type: URL_TYPE.MDNS,
|
||||
ipv4: lanMdnsUrl,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
|
||||
// Now Process the FQDN Urls
|
||||
nginx.fqdnUrls.forEach((fqdnUrl) => {
|
||||
try {
|
||||
const urlType = getUrlTypeFromFqdn(fqdnUrl.interface);
|
||||
const fqdnUrlToUse = getUrlForField({
|
||||
url: fqdnUrl.fqdn,
|
||||
portSsl: urlType === URL_TYPE.WAN ? Number(wanport) : nginx.httpsPort,
|
||||
});
|
||||
|
||||
urls.push({
|
||||
name: `FQDN ${fqdnUrl.interface}${fqdnUrl.id !== null ? ` ${fqdnUrl.id}` : ''}`,
|
||||
type: getUrlTypeFromFqdn(fqdnUrl.interface),
|
||||
ipv4: fqdnUrlToUse,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { urls, errors };
|
||||
};
|
||||
@@ -1,86 +0,0 @@
|
||||
import type { RemoteGraphQlEventFragmentFragment } from '@app/graphql/generated/client/graphql.js';
|
||||
import { remoteQueryLogger } from '@app/core/log.js';
|
||||
import { getApiApolloClient } from '@app/graphql/client/api/get-api-client.js';
|
||||
import { RemoteGraphQlEventType } from '@app/graphql/generated/client/graphql.js';
|
||||
import { SEND_REMOTE_QUERY_RESPONSE } from '@app/graphql/mothership/mutations.js';
|
||||
import { parseGraphQLQuery } from '@app/graphql/resolvers/subscription/remote-graphql/remote-graphql-helpers.js';
|
||||
import { GraphQLClient } from '@app/mothership/graphql-client.js';
|
||||
import { getters } from '@app/store/index.js';
|
||||
|
||||
export const executeRemoteGraphQLQuery = async (
|
||||
data: RemoteGraphQlEventFragmentFragment['remoteGraphQLEventData']
|
||||
) => {
|
||||
remoteQueryLogger.debug({ query: data }, 'Executing remote query');
|
||||
const client = GraphQLClient.getInstance();
|
||||
const localApiKey = getters.config().remote.localApiKey;
|
||||
|
||||
if (!localApiKey) {
|
||||
throw new Error('Local API key is missing');
|
||||
}
|
||||
|
||||
const apiKey = localApiKey;
|
||||
const originalBody = data.body;
|
||||
|
||||
try {
|
||||
const parsedQuery = parseGraphQLQuery(originalBody);
|
||||
const localClient = getApiApolloClient({
|
||||
localApiKey: apiKey,
|
||||
});
|
||||
remoteQueryLogger.trace({ query: parsedQuery.query }, '[DEVONLY] Running query');
|
||||
const localResult = await localClient.query({
|
||||
query: parsedQuery.query,
|
||||
variables: parsedQuery.variables,
|
||||
});
|
||||
if (localResult.data) {
|
||||
remoteQueryLogger.trace(
|
||||
{ data: localResult.data },
|
||||
'Got data from remoteQuery request',
|
||||
data.sha256
|
||||
);
|
||||
|
||||
await client?.mutate({
|
||||
mutation: SEND_REMOTE_QUERY_RESPONSE,
|
||||
variables: {
|
||||
input: {
|
||||
sha256: data.sha256,
|
||||
body: JSON.stringify({ data: localResult.data }),
|
||||
type: RemoteGraphQlEventType.REMOTE_QUERY_EVENT,
|
||||
},
|
||||
},
|
||||
errorPolicy: 'none',
|
||||
});
|
||||
} else {
|
||||
// @TODO fix this not sending an error
|
||||
await client?.mutate({
|
||||
mutation: SEND_REMOTE_QUERY_RESPONSE,
|
||||
variables: {
|
||||
input: {
|
||||
sha256: data.sha256,
|
||||
body: JSON.stringify({ errors: localResult.error }),
|
||||
type: RemoteGraphQlEventType.REMOTE_QUERY_EVENT,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
try {
|
||||
await client?.mutate({
|
||||
mutation: SEND_REMOTE_QUERY_RESPONSE,
|
||||
variables: {
|
||||
input: {
|
||||
sha256: data.sha256,
|
||||
body: JSON.stringify({ errors: err }),
|
||||
type: RemoteGraphQlEventType.REMOTE_QUERY_EVENT,
|
||||
},
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
remoteQueryLogger.warn('Could not respond %o', error);
|
||||
}
|
||||
remoteQueryLogger.error(
|
||||
'Error executing remote query %s',
|
||||
err instanceof Error ? err.message : 'Unknown Error'
|
||||
);
|
||||
remoteQueryLogger.trace(err);
|
||||
}
|
||||
};
|
||||
@@ -1,9 +0,0 @@
|
||||
import { type RemoteGraphQlEventFragmentFragment } from '@app/graphql/generated/client/graphql.js';
|
||||
import { addRemoteSubscription } from '@app/store/actions/add-remote-subscription.js';
|
||||
import { store } from '@app/store/index.js';
|
||||
|
||||
export const createRemoteSubscription = async (
|
||||
data: RemoteGraphQlEventFragmentFragment['remoteGraphQLEventData']
|
||||
) => {
|
||||
await store.dispatch(addRemoteSubscription(data));
|
||||
};
|
||||
@@ -1,28 +0,0 @@
|
||||
import { mergeTypeDefs } from '@graphql-tools/merge';
|
||||
|
||||
import { logger } from '@app/core/log.js';
|
||||
|
||||
export const loadTypeDefs = async (additionalTypeDefs: string[] = []) => {
|
||||
// TypeScript now knows this returns Record<string, () => Promise<string>>
|
||||
const typeModules = import.meta.glob('./types/**/*.graphql', { query: '?raw', import: 'default' });
|
||||
|
||||
try {
|
||||
const files = await Promise.all(
|
||||
Object.values(typeModules).map(async (importFn) => {
|
||||
const content = await importFn();
|
||||
if (typeof content !== 'string') {
|
||||
throw new Error('Invalid GraphQL type definition format');
|
||||
}
|
||||
return content;
|
||||
})
|
||||
);
|
||||
if (!files.length) {
|
||||
throw new Error('No GraphQL type definitions found');
|
||||
}
|
||||
files.push(...additionalTypeDefs);
|
||||
return mergeTypeDefs(files);
|
||||
} catch (error) {
|
||||
logger.error('Failed to load GraphQL type definitions:', error);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user