Mirror of https://github.com/unraid/api.git (synced 2026-01-02 14:40:01 -06:00)

Compare commits: v4.9.3 ... add-claude (36 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 86b6c4f85b |  |
|  | 45bd73698b |  |
|  | fee7d4613e |  |
|  | b6acf50c0d |  |
|  | 8279531f2b |  |
|  | 0a18b38008 |  |
|  | 23b2b88461 |  |
|  | f5352e3a26 |  |
|  | 9dfdb8dce7 |  |
|  | 407585cd40 |  |
|  | 05056e7ca1 |  |
|  | a74d935b56 |  |
|  | 2c62e0ad09 |  |
|  | 1a8da6d92b |  |
|  | 81808ada0f |  |
|  | eecd9b1017 |  |
|  | 441e1805c1 |  |
|  | 29dcb7d0f0 |  |
|  | 1a7d35d3f6 |  |
|  | af33e999a0 |  |
|  | 85a35804c1 |  |
|  | a35c8ff2f1 |  |
|  | 153e7a1e3a |  |
|  | e73fc356cb |  |
|  | e1a7a3d22d |  |
|  | 53b05ebe5e |  |
|  | 2ed1308e40 |  |
|  | 6c03df2b97 |  |
|  | 074370c42c |  |
|  | f34a33bc9f |  |
|  | c7801a9236 |  |
|  | dd759d9f0f |  |
|  | 74da8d81ef |  |
|  | 33e0b1ab24 |  |
|  | ca4e2db1f2 |  |
|  | ea20d1e211 |  |
```diff
@@ -11,7 +11,34 @@
       "Bash(pnpm type-check:*)",
       "Bash(pnpm lint:*)",
       "Bash(pnpm --filter ./api lint)",
-      "Bash(mv:*)"
+      "Bash(mv:*)",
+      "Bash(ls:*)",
+      "mcp__ide__getDiagnostics",
+      "Bash(pnpm --filter \"*connect*\" test connect-status-writer.service.spec)",
+      "Bash(pnpm storybook:*)",
+      "Bash(pnpm add:*)",
+      "Bash(pnpm install:*)",
+      "Bash(pkill:*)",
+      "Bash(true)",
+      "Bash(timeout 15 pnpm storybook)",
+      "WebFetch(domain:tailwindcss.com)",
+      "Bash(pnpm list:*)",
+      "Bash(pnpm remove:*)",
+      "WebFetch(domain:github.com)",
+      "mcp__browsermcp__browser_navigate",
+      "Bash(clear)",
+      "Bash(git log:*)",
+      "Bash(pnpm --filter ./unraid-ui build)",
+      "Bash(pnpm --filter @unraid/ui build)",
+      "Bash(pnpm --filter @unraid/web build)",
+      "Bash(python3:*)",
+      "Bash(pnpm tailwind:build:*)",
+      "WebFetch(domain:erangrin.github.io)",
+      "Bash(pnpm clean:*)",
+      "Bash(pnpm validate:css:*)",
+      "Bash(node:*)",
+      "Bash(rm:*)",
+      "Bash(pnpm run:*)"
     ]
   },
   "enableAllProjectMcpServers": false
```
.github/workflows/deploy-storybook.yml (vendored): 4 lines changed

```diff
@@ -25,7 +25,7 @@ jobs:
       - name: Setup Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: '20'
+          node-version: '22.17.0'

       - uses: pnpm/action-setup@v4
         name: Install pnpm
@@ -33,7 +33,7 @@ jobs:
           run_install: false

       - name: Cache APT Packages
-        uses: awalsh128/cache-apt-pkgs-action@v1.4.3
+        uses: awalsh128/cache-apt-pkgs-action@v1.5.1
         with:
           packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
           version: 1.0
```
.github/workflows/main.yml (vendored): 6 lines changed

```diff
@@ -45,7 +45,7 @@ jobs:
           node-version-file: ".nvmrc"

       - name: Cache APT Packages
-        uses: awalsh128/cache-apt-pkgs-action@v1.4.3
+        uses: awalsh128/cache-apt-pkgs-action@v1.5.1
         with:
           packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
           version: 1.0
@@ -190,7 +190,7 @@ jobs:
             ${{ runner.os }}-pnpm-store-

       - name: Cache APT Packages
-        uses: awalsh128/cache-apt-pkgs-action@v1.4.3
+        uses: awalsh128/cache-apt-pkgs-action@v1.5.1
         with:
           packages: bash procps python3 libvirt-dev jq zstd git build-essential
           version: 1.0
@@ -267,7 +267,7 @@ jobs:
             ${{ runner.os }}-pnpm-store-

       - name: Cache APT Packages
-        uses: awalsh128/cache-apt-pkgs-action@v1.4.3
+        uses: awalsh128/cache-apt-pkgs-action@v1.5.1
         with:
           packages: bash procps python3 libvirt-dev jq zstd git build-essential
           version: 1.0
```
.github/workflows/test-libvirt.yml (vendored): 2 lines changed

```diff
@@ -31,7 +31,7 @@ jobs:
           python-version: "3.13.5"

       - name: Cache APT Packages
-        uses: awalsh128/cache-apt-pkgs-action@v1.4.3
+        uses: awalsh128/cache-apt-pkgs-action@v1.5.1
         with:
           packages: libvirt-dev
           version: 1.0
```
```diff
@@ -1 +1 @@
-{".":"4.9.3"}
+{".":"4.10.0"}
```
```diff
@@ -135,3 +135,8 @@ Enables GraphQL playground at `http://tower.local/graphql`
 - Place all mock declarations at the top level
 - Use factory functions for module mocks to avoid hoisting issues
 - Clear mocks between tests to ensure isolation
+
+## Development Memories
+
+- We are using Tailwind v4, so we no longer need a Tailwind config file
+- Always check the Tailwind v4 documentation online when making Tailwind-related style changes
```
```diff
@@ -1,5 +1,39 @@
 # Changelog

+## [4.10.0](https://github.com/unraid/api/compare/v4.9.5...v4.10.0) (2025-07-15)
+
+### Features
+
+* trial extension allowed within 5 days of expiration ([#1490](https://github.com/unraid/api/issues/1490)) ([f34a33b](https://github.com/unraid/api/commit/f34a33bc9f1a7e135d453d9d31888789bfc3f878))
+
+### Bug Fixes
+
+* delay `nginx:reload` file mod effect by 10 seconds ([#1512](https://github.com/unraid/api/issues/1512)) ([af33e99](https://github.com/unraid/api/commit/af33e999a0480a77e3e6b2aa833b17b38b835656))
+* **deps:** update all non-major dependencies ([#1489](https://github.com/unraid/api/issues/1489)) ([53b05eb](https://github.com/unraid/api/commit/53b05ebe5e2050cb0916fcd65e8d41370aee0624))
+* ensure no crash if emhttp state configs are missing ([#1514](https://github.com/unraid/api/issues/1514)) ([1a7d35d](https://github.com/unraid/api/commit/1a7d35d3f6972fd8aff58c17b2b0fb79725e660e))
+* **my.servers:** improve DNS resolution robustness for backup server ([#1518](https://github.com/unraid/api/issues/1518)) ([eecd9b1](https://github.com/unraid/api/commit/eecd9b1017a63651d1dc782feaa224111cdee8b6))
+* over-eager cloud query from web components ([#1506](https://github.com/unraid/api/issues/1506)) ([074370c](https://github.com/unraid/api/commit/074370c42cdecc4dbc58193ff518aa25735c56b3))
+* replace myservers.cfg reads in UpdateFlashBackup.php ([#1517](https://github.com/unraid/api/issues/1517)) ([441e180](https://github.com/unraid/api/commit/441e1805c108a6c1cd35ee093246b975a03f8474))
+* rm short-circuit in `rc.unraid-api` if plugin config dir is absent ([#1515](https://github.com/unraid/api/issues/1515)) ([29dcb7d](https://github.com/unraid/api/commit/29dcb7d0f088937cefc5158055f48680e86e5c36))
+
+## [4.9.5](https://github.com/unraid/api/compare/v4.9.4...v4.9.5) (2025-07-10)
+
+### Bug Fixes
+
+* **connect:** rm eager restart on `ERROR_RETYING` connection status ([#1502](https://github.com/unraid/api/issues/1502)) ([dd759d9](https://github.com/unraid/api/commit/dd759d9f0f841b296f8083bc67c6cd3f7a69aa5b))
+
+## [4.9.4](https://github.com/unraid/api/compare/v4.9.3...v4.9.4) (2025-07-09)
+
+### Bug Fixes
+
+* backport `<unraid-modals>` upon plg install when necessary ([#1499](https://github.com/unraid/api/issues/1499)) ([33e0b1a](https://github.com/unraid/api/commit/33e0b1ab24bedb6a2c7b376ea73dbe65bc3044be))
+* DefaultPageLayout patch rollback omits legacy header logo ([#1497](https://github.com/unraid/api/issues/1497)) ([ea20d1e](https://github.com/unraid/api/commit/ea20d1e2116fcafa154090fee78b42ec5d9ba584))
+* event emitter setup for writing status ([#1496](https://github.com/unraid/api/issues/1496)) ([ca4e2db](https://github.com/unraid/api/commit/ca4e2db1f29126a1fa3784af563832edda64b0ca))
+
 ## [4.9.3](https://github.com/unraid/api/compare/v4.9.2...v4.9.3) (2025-07-09)
```
```diff
@@ -1,10 +1,12 @@
 {
-    "version": "4.8.0",
+    "version": "4.10.0",
     "extraOrigins": [
         "https://google.com",
         "https://test.com"
     ],
     "sandbox": true,
     "ssoSubIds": [],
-    "plugins": ["unraid-api-plugin-connect"]
+    "plugins": [
+        "unraid-api-plugin-connect"
+    ]
 }
```
```diff
@@ -1,16 +1,12 @@
 {
-    "wanaccess": false,
-    "wanport": 0,
+    "wanaccess": true,
+    "wanport": 8443,
     "upnpEnabled": false,
-    "apikey": "",
-    "localApiKey": "",
-    "email": "",
-    "username": "",
-    "avatar": "",
-    "regWizTime": "",
-    "accesstoken": "",
-    "idtoken": "",
-    "refreshtoken": "",
-    "dynamicRemoteAccessType": "DISABLED",
-    "ssoSubIds": []
+    "apikey": "_______________________BIG_API_KEY_HERE_________________________",
+    "localApiKey": "_______________________LOCAL_API_KEY_HERE_________________________",
+    "email": "test@example.com",
+    "username": "zspearmint",
+    "avatar": "https://via.placeholder.com/200",
+    "regWizTime": "1611175408732_0951-1653-3509-FBA155FA23C0",
+    "dynamicRemoteAccessType": "DISABLED"
 }
```
```diff
@@ -1,6 +1,6 @@
 {
     "name": "@unraid/api",
-    "version": "4.9.3",
+    "version": "4.10.0",
     "main": "src/cli/index.ts",
     "type": "module",
     "corepack": {
@@ -10,7 +10,7 @@
     "author": "Lime Technology, Inc. <unraid.net>",
     "license": "GPL-2.0-or-later",
     "engines": {
-        "pnpm": "10.12.4"
+        "pnpm": "10.13.1"
     },
     "scripts": {
         "// Development": "",
@@ -57,7 +57,7 @@
         "@as-integrations/fastify": "2.1.1",
         "@fastify/cookie": "11.0.2",
         "@fastify/helmet": "13.0.1",
-        "@graphql-codegen/client-preset": "4.8.2",
+        "@graphql-codegen/client-preset": "4.8.3",
         "@graphql-tools/load-files": "7.0.1",
         "@graphql-tools/merge": "9.0.24",
         "@graphql-tools/schema": "10.0.23",
@@ -82,7 +82,7 @@
         "accesscontrol": "2.2.1",
         "bycontract": "2.0.11",
         "bytes": "3.1.2",
-        "cache-manager": "7.0.0",
+        "cache-manager": "7.0.1",
         "cacheable-lookup": "7.0.0",
         "camelcase-keys": "9.1.3",
         "casbin": "5.38.0",
@@ -94,11 +94,11 @@
         "command-exists": "1.2.9",
         "convert": "5.12.0",
         "cookie": "1.0.2",
-        "cron": "4.3.1",
+        "cron": "4.3.2",
         "cross-fetch": "4.1.0",
         "diff": "8.0.2",
         "dockerode": "4.0.7",
-        "dotenv": "17.1.0",
+        "dotenv": "17.2.0",
         "execa": "9.6.0",
         "exit-hook": "4.0.0",
         "fastify": "5.4.0",
@@ -112,7 +112,7 @@
         "graphql-scalars": "1.24.2",
         "graphql-subscriptions": "3.0.0",
         "graphql-tag": "2.12.6",
-        "graphql-ws": "6.0.5",
+        "graphql-ws": "6.0.6",
         "ini": "5.0.0",
         "ip": "2.0.1",
         "jose": "6.0.11",
@@ -138,11 +138,11 @@
         "rxjs": "7.8.2",
         "semver": "7.7.2",
         "strftime": "0.10.3",
-        "systeminformation": "5.27.6",
+        "systeminformation": "5.27.7",
         "uuid": "11.1.0",
-        "ws": "8.18.2",
+        "ws": "8.18.3",
         "zen-observable-ts": "1.1.0",
-        "zod": "3.25.67"
+        "zod": "3.25.76"
     },
     "peerDependencies": {
         "unraid-api-plugin-connect": "workspace:*"
@@ -153,35 +153,35 @@
         }
     },
     "devDependencies": {
-        "@eslint/js": "9.29.0",
+        "@eslint/js": "9.31.0",
         "@graphql-codegen/add": "5.0.3",
         "@graphql-codegen/cli": "5.0.7",
         "@graphql-codegen/fragment-matcher": "5.1.0",
         "@graphql-codegen/import-types-preset": "3.0.1",
-        "@graphql-codegen/typed-document-node": "5.1.1",
+        "@graphql-codegen/typed-document-node": "5.1.2",
         "@graphql-codegen/typescript": "4.1.6",
         "@graphql-codegen/typescript-operations": "4.6.1",
         "@graphql-codegen/typescript-resolvers": "4.5.1",
         "@graphql-typed-document-node/core": "3.2.0",
-        "@ianvs/prettier-plugin-sort-imports": "4.4.2",
+        "@ianvs/prettier-plugin-sort-imports": "4.5.1",
         "@nestjs/testing": "11.1.3",
         "@originjs/vite-plugin-commonjs": "1.0.3",
         "@rollup/plugin-node-resolve": "16.0.1",
-        "@swc/core": "1.12.4",
+        "@swc/core": "1.12.14",
         "@types/async-exit-hook": "2.0.2",
         "@types/bytes": "3.1.5",
         "@types/cli-table": "0.3.4",
         "@types/command-exists": "1.2.3",
         "@types/cors": "2.8.19",
-        "@types/dockerode": "3.3.41",
+        "@types/dockerode": "3.3.42",
         "@types/graphql-fields": "1.3.9",
         "@types/graphql-type-uuid": "0.2.6",
         "@types/ini": "4.1.1",
         "@types/ip": "1.1.3",
-        "@types/lodash": "4.17.18",
+        "@types/lodash": "4.17.20",
         "@types/lodash-es": "4.17.12",
         "@types/mustache": "4.2.6",
-        "@types/node": "22.15.32",
+        "@types/node": "22.16.4",
         "@types/pify": "6.1.0",
         "@types/semver": "7.7.0",
         "@types/sendmail": "1.4.7",
@@ -193,27 +193,27 @@
         "@vitest/coverage-v8": "3.2.4",
         "@vitest/ui": "3.2.4",
         "cz-conventional-changelog": "3.3.0",
-        "eslint": "9.29.0",
-        "eslint-plugin-import": "2.31.0",
-        "eslint-plugin-n": "17.20.0",
+        "eslint": "9.31.0",
+        "eslint-plugin-import": "2.32.0",
+        "eslint-plugin-n": "17.21.0",
         "eslint-plugin-no-relative-import-paths": "1.6.1",
-        "eslint-plugin-prettier": "5.5.0",
+        "eslint-plugin-prettier": "5.5.1",
         "graphql-codegen-typescript-validation-schema": "0.17.1",
         "jiti": "2.4.2",
         "nodemon": "3.1.10",
-        "prettier": "3.5.3",
+        "prettier": "3.6.2",
         "rollup-plugin-node-externals": "8.0.1",
-        "commit-and-tag-version": "9.5.0",
+        "commit-and-tag-version": "9.6.0",
         "tsx": "4.20.3",
         "type-fest": "4.41.0",
         "typescript": "5.8.3",
-        "typescript-eslint": "8.34.1",
+        "typescript-eslint": "8.37.0",
         "unplugin-swc": "1.5.5",
-        "vite": "7.0.3",
+        "vite": "7.0.4",
         "vite-plugin-node": "7.0.0",
         "vite-tsconfig-paths": "5.1.4",
         "vitest": "3.2.4",
-        "zx": "8.5.5"
+        "zx": "8.7.1"
     },
     "overrides": {
         "eslint": {
@@ -228,5 +228,5 @@
         }
     },
     "private": true,
-    "packageManager": "pnpm@10.12.4"
+    "packageManager": "pnpm@10.13.1"
 }
```
@@ -1,137 +0,0 @@

```ts
import { ConfigService } from '@nestjs/config';

import { beforeEach, describe, expect, it, vi } from 'vitest';

import { ApiConfigPersistence } from '@app/unraid-api/config/api-config.module.js';
import { ConfigPersistenceHelper } from '@app/unraid-api/config/persistence.helper.js';

describe('ApiConfigPersistence', () => {
    let service: ApiConfigPersistence;
    let configService: ConfigService;
    let persistenceHelper: ConfigPersistenceHelper;

    beforeEach(() => {
        configService = {
            get: vi.fn(),
            set: vi.fn(),
        } as any;

        persistenceHelper = {} as ConfigPersistenceHelper;
        service = new ApiConfigPersistence(configService, persistenceHelper);
    });

    describe('convertLegacyConfig', () => {
        it('should migrate sandbox from string "yes" to boolean true', () => {
            const legacyConfig = {
                local: { sandbox: 'yes' },
                api: { extraOrigins: '' },
                remote: { ssoSubIds: '' },
            };

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.sandbox).toBe(true);
        });

        it('should migrate sandbox from string "no" to boolean false', () => {
            const legacyConfig = {
                local: { sandbox: 'no' },
                api: { extraOrigins: '' },
                remote: { ssoSubIds: '' },
            };

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.sandbox).toBe(false);
        });

        it('should migrate extraOrigins from comma-separated string to array', () => {
            const legacyConfig = {
                local: { sandbox: 'no' },
                api: { extraOrigins: 'https://example.com,https://test.com' },
                remote: { ssoSubIds: '' },
            };

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.extraOrigins).toEqual(['https://example.com', 'https://test.com']);
        });

        it('should filter out non-HTTP origins from extraOrigins', () => {
            const legacyConfig = {
                local: { sandbox: 'no' },
                api: {
                    extraOrigins: 'https://example.com,invalid-origin,http://test.com,ftp://bad.com',
                },
                remote: { ssoSubIds: '' },
            };

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.extraOrigins).toEqual(['https://example.com', 'http://test.com']);
        });

        it('should handle empty extraOrigins string', () => {
            const legacyConfig = {
                local: { sandbox: 'no' },
                api: { extraOrigins: '' },
                remote: { ssoSubIds: '' },
            };

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.extraOrigins).toEqual([]);
        });

        it('should migrate ssoSubIds from comma-separated string to array', () => {
            const legacyConfig = {
                local: { sandbox: 'no' },
                api: { extraOrigins: '' },
                remote: { ssoSubIds: 'user1,user2,user3' },
            };

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.ssoSubIds).toEqual(['user1', 'user2', 'user3']);
        });

        it('should handle empty ssoSubIds string', () => {
            const legacyConfig = {
                local: { sandbox: 'no' },
                api: { extraOrigins: '' },
                remote: { ssoSubIds: '' },
            };

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.ssoSubIds).toEqual([]);
        });

        it('should handle undefined config sections', () => {
            const legacyConfig = {};

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.sandbox).toBe(false);
            expect(result.extraOrigins).toEqual([]);
            expect(result.ssoSubIds).toEqual([]);
        });

        it('should handle complete migration with all fields', () => {
            const legacyConfig = {
                local: { sandbox: 'yes' },
                api: { extraOrigins: 'https://app1.example.com,https://app2.example.com' },
                remote: { ssoSubIds: 'sub1,sub2,sub3' },
            };

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.sandbox).toBe(true);
            expect(result.extraOrigins).toEqual([
                'https://app1.example.com',
                'https://app2.example.com',
            ]);
            expect(result.ssoSubIds).toEqual(['sub1', 'sub2', 'sub3']);
        });
    });
});
```
```diff
@@ -17,7 +17,6 @@ exports[`Returns paths 1`] = `
   "myservers-base",
   "myservers-config",
-  "myservers-config-states",
   "myservers-env",
   "myservers-keepalive",
   "keyfile-base",
   "machine-id",
```
```diff
@@ -24,7 +24,6 @@ test('Returns paths', async () => {
     'myservers-base': '/boot/config/plugins/dynamix.my.servers/',
     'myservers-config': expect.stringContaining('api/dev/Unraid.net/myservers.cfg'),
-    'myservers-config-states': expect.stringContaining('api/dev/states/myservers.cfg'),
     'myservers-env': '/boot/config/plugins/dynamix.my.servers/env',
     'myservers-keepalive': './dev/Unraid.net/fb_keepalive',
     'keyfile-base': expect.stringContaining('api/dev/Unraid.net'),
     'machine-id': expect.stringContaining('api/dev/data/machine-id'),
```
```diff
@@ -67,6 +67,7 @@ export const getPackageJsonDependencies = (): string[] | undefined => {

 export const API_VERSION = process.env.npm_package_version ?? getPackageJson().version;

+/** Controls how the app is built/run (i.e. in terms of optimization) */
 export const NODE_ENV =
     (process.env.NODE_ENV as 'development' | 'test' | 'staging' | 'production') ?? 'production';
 export const environment = {
@@ -76,6 +77,7 @@ export const CHOKIDAR_USEPOLLING = process.env.CHOKIDAR_USEPOLLING === 'true';
 export const IS_DOCKER = process.env.IS_DOCKER === 'true';
 export const DEBUG = process.env.DEBUG === 'true';
 export const INTROSPECTION = process.env.INTROSPECTION === 'true';
+/** Determines the app-level & business logic environment (i.e. what data & infrastructure is used) */
 export const ENVIRONMENT = process.env.ENVIRONMENT
     ? (process.env.ENVIRONMENT as 'production' | 'staging' | 'development')
     : 'production';
```
```diff
@@ -1,4 +1,4 @@
-import { writeFileSync } from 'fs';
+import { writeFile } from 'fs/promises';

 import type { ConfigType } from '@app/core/utils/files/config-file-normalizer.js';
 import { logger } from '@app/core/log.js';
@@ -17,6 +17,6 @@ export const enableConfigFileListener = (mode: ConfigType) => () =>
         const writeableConfig = getWriteableConfig(config, mode);
         const serializedConfig = safelySerializeObjectToIni(writeableConfig);
         logger.debug('Writing updated config to %s', pathToWrite);
-        writeFileSync(pathToWrite, serializedConfig);
+        await writeFile(pathToWrite, serializedConfig);
     },
 });
```
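The hunk above swaps the blocking write for the promise-based API. As a minimal sketch (names here are illustrative, not the repository's exact code), the listener's effect callback has to become async so the write can be awaited instead of blocking the event loop:

```ts
import { writeFile } from 'node:fs/promises';

// Illustrative sketch: once fs/promises.writeFile replaces fs.writeFileSync,
// the surrounding callback must be declared async and await the write.
export const writeSerializedConfig = async (
    pathToWrite: string,
    serializedConfig: string
): Promise<void> => {
    await writeFile(pathToWrite, serializedConfig);
};
```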
```diff
@@ -1,6 +1,5 @@
 import { F_OK } from 'constants';
-import { writeFileSync } from 'fs';
-import { access } from 'fs/promises';
+import { access, writeFile } from 'fs/promises';

 import type { PayloadAction } from '@reduxjs/toolkit';
 import { createAsyncThunk, createSlice, isAnyOf } from '@reduxjs/toolkit';
@@ -140,7 +139,7 @@ export const loadConfigFile = createAsyncThunk<
             const newConfig = getWriteableConfig(initialState, 'flash');
             newConfig.remote.wanaccess = 'no';
             const serializedConfig = safelySerializeObjectToIni(newConfig);
-            writeFileSync(getState().paths['myservers-config'], serializedConfig);
+            await writeFile(getState().paths['myservers-config'], serializedConfig);
             return rejectWithValue({
                 type: CONFIG_LOAD_ERROR.CONFIG_CORRUPTED,
                 error: error instanceof Error ? error : new Error('Unknown Error'),
```
```
@@ -49,7 +49,6 @@ const initialState = {
        resolvePath(process.env.PATHS_STATES ?? ('/usr/local/emhttp/state/' as const)),
        'myservers.cfg' as const
    ),
    'myservers-env': '/boot/config/plugins/dynamix.my.servers/env' as const,
    'myservers-keepalive':
        process.env.PATHS_MY_SERVERS_FB ??
        ('/boot/config/plugins/dynamix.my.servers/fb_keepalive' as const),
```
```diff
@@ -1,6 +1,6 @@
-import { writeFileSync } from 'fs';
 import { join } from 'path';

+import { ensureWriteSync } from '@unraid/shared/util/file.js';
 import { isEqual } from 'lodash-es';

 import type { RootState } from '@app/store/index.js';
@@ -27,8 +27,11 @@ export const startStoreSync = async () => {
             !isEqual(state, lastState) &&
             state.paths['myservers-config-states']
         ) {
-            writeFileSync(join(state.paths.states, 'config.log'), JSON.stringify(state.config, null, 2));
-            writeFileSync(
+            ensureWriteSync(
+                join(state.paths.states, 'config.log'),
+                JSON.stringify(state.config, null, 2)
+            );
+            ensureWriteSync(
                 join(state.paths.states, 'graphql.log'),
                 JSON.stringify(state.minigraph, null, 2)
             );
```
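The diff above replaces raw `writeFileSync` calls with `ensureWriteSync` from `@unraid/shared/util/file.js`. That helper's implementation is not shown in this comparison; a plausible reading of the name is "create the parent directory if needed, then write", roughly like the sketch below (purely illustrative, assuming that behavior):

```ts
import { mkdirSync, writeFileSync } from 'node:fs';
import { dirname } from 'node:path';

// Assumption: ensureWriteSync guarantees the target directory exists before
// writing. The shared package's real implementation may differ.
export function ensureWriteSync(filePath: string, data: string): void {
    mkdirSync(dirname(filePath), { recursive: true });
    writeFileSync(filePath, data);
}
```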
```diff
@@ -1,11 +1,11 @@
 import { Injectable } from '@nestjs/common';
-import { existsSync } from 'node:fs';
 import { mkdir, rm } from 'node:fs/promises';
 import { join } from 'node:path';

 import type { Options, Result, ResultPromise } from 'execa';
 import { execa, ExecaError } from 'execa';

+import { fileExists } from '@app/core/utils/files/file-exists.js';
 import { LOGS_DIR, PM2_HOME, PM2_PATH } from '@app/environment.js';
 import { LogService } from '@app/unraid-api/cli/log.service.js';

@@ -90,7 +90,7 @@ export class PM2Service {
     }

     async deletePm2Home() {
-        if (existsSync(PM2_HOME) && existsSync(join(PM2_HOME, 'pm2.log'))) {
+        if ((await fileExists(PM2_HOME)) && (await fileExists(join(PM2_HOME, 'pm2.log')))) {
             await rm(PM2_HOME, { recursive: true, force: true });
             this.logger.trace('PM2 home directory cleared.');
         } else {
```
|
||||
import { copyFile, readFile, writeFile } from 'fs/promises';
|
||||
import { copyFile } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
|
||||
import { Command, CommandRunner, Option } from 'nest-commander';
|
||||
|
||||
import { cliLogger } from '@app/core/log.js';
|
||||
import { fileExistsSync } from '@app/core/utils/files/file-exists.js';
|
||||
import { ENVIRONMENT } from '@app/environment.js';
|
||||
import { getters } from '@app/store/index.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { StartCommand } from '@app/unraid-api/cli/start.command.js';
|
||||
import { StopCommand } from '@app/unraid-api/cli/stop.command.js';
|
||||
import { RestartCommand } from '@app/unraid-api/cli/restart.command.js';
|
||||
|
||||
interface SwitchEnvOptions {
|
||||
environment?: 'staging' | 'production';
|
||||
@@ -31,60 +31,43 @@ export class SwitchEnvCommand extends CommandRunner {
|
||||
|
||||
constructor(
|
||||
private readonly logger: LogService,
|
||||
private readonly stopCommand: StopCommand,
|
||||
private readonly startCommand: StartCommand
|
||||
private readonly restartCommand: RestartCommand
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
private async getEnvironmentFromFile(path: string): Promise<'production' | 'staging'> {
|
||||
const envFile = await readFile(path, 'utf-8').catch(() => '');
|
||||
this.logger.debug(`Checking ${path} for current ENV, found ${envFile}`);
|
||||
|
||||
// Match the env file env="production" which would be [0] = env="production", [1] = env and [2] = production
|
||||
const matchArray = /([a-zA-Z]+)=["]*([a-zA-Z]+)["]*/.exec(envFile);
|
||||
// Get item from index 2 of the regex match or return production
|
||||
const [, , currentEnvInFile] = matchArray && matchArray.length === 3 ? matchArray : [];
|
||||
return this.parseStringToEnv(currentEnvInFile);
|
||||
}
|
||||
|
||||
private switchToOtherEnv(environment: 'production' | 'staging'): 'production' | 'staging' {
|
||||
if (environment === 'production') {
|
||||
return 'staging';
|
||||
}
|
||||
return 'production';
|
||||
}
|
||||
|
||||
async run(_, options: SwitchEnvOptions): Promise<void> {
|
||||
const paths = getters.paths();
|
||||
const basePath = paths['unraid-api-base'];
|
||||
const envFlashFilePath = paths['myservers-env'];
|
||||
const currentEnvPath = join(basePath, '.env');
|
||||
|
||||
this.logger.warn('Stopping the Unraid API');
|
||||
try {
|
||||
await this.stopCommand.run([], { delete: false });
|
||||
} catch (err) {
|
||||
this.logger.warn('Failed to stop the Unraid API (maybe already stopped?)');
|
||||
// Determine target environment
|
||||
const currentEnv = ENVIRONMENT;
|
||||
const targetEnv = options.environment ?? 'production';
|
||||
|
||||
this.logger.info(`Switching environment from ${currentEnv} to ${targetEnv}`);
|
||||
|
||||
// Check if target environment file exists
|
||||
const sourceEnvPath = join(basePath, `.env.${targetEnv}`);
|
||||
if (!fileExistsSync(sourceEnvPath)) {
|
||||
this.logger.error(
|
||||
`Environment file ${sourceEnvPath} does not exist. Cannot switch to ${targetEnv} environment.`
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const newEnv =
|
||||
options.environment ??
|
||||
this.switchToOtherEnv(await this.getEnvironmentFromFile(envFlashFilePath));
|
||||
this.logger.info(`Setting environment to ${newEnv}`);
|
||||
// Copy the target environment file to .env
|
||||
this.logger.debug(`Copying ${sourceEnvPath} to ${currentEnvPath}`);
|
||||
try {
|
||||
await copyFile(sourceEnvPath, currentEnvPath);
|
||||
this.logger.info(`Successfully switched to ${targetEnv} environment`);
|
||||
} catch (error) {
|
||||
this.logger.error(`Failed to copy environment file: ${error}`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Write new env to flash
|
||||
const newEnvLine = `env="${newEnv}"`;
|
||||
this.logger.debug('Writing %s to %s', newEnvLine, envFlashFilePath);
|
||||
await writeFile(envFlashFilePath, newEnvLine);
|
||||
|
||||
// Copy the new env over to live location before restarting
|
||||
const source = join(basePath, `.env.${newEnv}`);
|
||||
const destination = join(basePath, '.env');
|
||||
|
||||
cliLogger.debug('Copying %s to %s', source, destination);
|
||||
await copyFile(source, destination);
|
||||
|
||||
cliLogger.info('Now using %s', newEnv);
|
||||
await this.startCommand.run([], {});
|
||||
// Restart the API to pick up the new environment
|
||||
this.logger.info('Restarting Unraid API to apply environment changes...');
|
||||
await this.restartCommand.run();
|
||||
}
|
||||
}
|
||||
|
||||
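For readers unfamiliar with nest-commander: the `options.environment` value consumed in `run()` is normally supplied by an `@Option`-decorated parser method on the same command class. A minimal sketch follows; the command name and flag spelling are assumptions, not taken from this diff:

```ts
import { Command, CommandRunner, Option } from 'nest-commander';

interface SwitchEnvOptions {
    environment?: 'staging' | 'production';
}

// Sketch only: 'switch-env' and '-e, --environment' are illustrative guesses.
@Command({ name: 'switch-env', description: 'Switch the API between environments' })
export class SwitchEnvCommandSketch extends CommandRunner {
    async run(_params: string[], options: SwitchEnvOptions): Promise<void> {
        // The parsed option lands on the options object keyed by its long flag name.
        console.log(`target environment: ${options.environment ?? 'production'}`);
    }

    @Option({
        flags: '-e, --environment <environment>',
        description: 'staging or production',
    })
    parseEnvironment(value: string): string {
        return value;
    }
}
```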
```diff
@@ -1,14 +1,12 @@
 import { Injectable, Logger, Module } from '@nestjs/common';
 import { ConfigService, registerAs } from '@nestjs/config';
+import path from 'path';

 import type { ApiConfig } from '@unraid/shared/services/api-config.js';
+import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';
 import { csvStringToArray } from '@unraid/shared/util/data.js';
-import { fileExists } from '@unraid/shared/util/file.js';
-import { bufferTime } from 'rxjs/operators';

-import { API_VERSION } from '@app/environment.js';
-import { ApiStateConfig } from '@app/unraid-api/config/factory/api-state.model.js';
-import { ConfigPersistenceHelper } from '@app/unraid-api/config/persistence.helper.js';
+import { API_VERSION, PATHS_CONFIG_MODULES } from '@app/environment.js';

 export { type ApiConfig };

@@ -22,123 +20,72 @@ const createDefaultConfig = (): ApiConfig => ({
     plugins: [],
 });

-export const persistApiConfig = async (config: ApiConfig) => {
-    const apiConfig = new ApiStateConfig<ApiConfig>(
-        {
-            name: 'api',
-            defaultConfig: config,
-            parse: (data) => data as ApiConfig,
-        },
-        new ConfigPersistenceHelper()
-    );
-    return await apiConfig.persist(config);
-};
-
 /**
  * Simple file-based config loading for plugin discovery (outside of nestjs DI container).
  * This avoids complex DI container instantiation during module loading.
  */
 export const loadApiConfig = async () => {
+    const defaultConfig = createDefaultConfig();
+    const apiHandler = new ApiConfigPersistence(new ConfigService()).getFileHandler();
+
+    let diskConfig: Partial<ApiConfig> = {};
     try {
-        const defaultConfig = createDefaultConfig();
-        const apiConfig = new ApiStateConfig<ApiConfig>(
-            {
-                name: 'api',
-                defaultConfig,
-                parse: (data) => data as ApiConfig,
-            },
-            new ConfigPersistenceHelper()
-        );
-
-        let diskConfig: ApiConfig | undefined;
-        try {
-            diskConfig = await apiConfig.parseConfig();
-        } catch (error) {
-            logger.error('Failed to load API config from disk, using defaults:', error);
-            diskConfig = undefined;
-
-            // Try to overwrite the invalid config with defaults to fix the issue
-            try {
-                const configToWrite = {
-                    ...defaultConfig,
-                    version: API_VERSION,
-                };
-
-                const writeSuccess = await apiConfig.persist(configToWrite);
-                if (writeSuccess) {
-                    logger.log('Successfully overwrote invalid config file with defaults.');
-                } else {
-                    logger.error(
-                        'Failed to overwrite invalid config file. Continuing with defaults in memory only.'
-                    );
-                }
-            } catch (persistError) {
-                logger.error('Error during config file repair:', persistError);
-            }
-        }
-
-        return {
-            ...defaultConfig,
-            ...diskConfig,
-            version: API_VERSION,
-        };
-    } catch (outerError) {
-        // This should never happen, but ensures the config factory never throws
-        logger.error('Critical error in loadApiConfig, using minimal defaults:', outerError);
-        return createDefaultConfig();
+        diskConfig = await apiHandler.loadConfig();
+    } catch (error) {
+        logger.warn('Failed to load API config from disk:', error);
     }
+
+    return {
+        ...defaultConfig,
+        ...diskConfig,
+        // diskConfig's version may be older, but we still want to use the correct version
+        version: API_VERSION,
+    };
 };

 /**
  * Loads the API config from disk. If not found, returns the default config, but does not persist it.
  * This is used in the root config module to register the api config.
  */
 export const apiConfig = registerAs<ApiConfig>('api', loadApiConfig);

 @Injectable()
-export class ApiConfigPersistence {
-    private configModel: ApiStateConfig<ApiConfig>;
-    private logger = new Logger(ApiConfigPersistence.name);
-    get filePath() {
-        return this.configModel.filePath;
-    }
-    get config() {
-        return this.configService.getOrThrow('api');
-    }
-
-    constructor(
-        private readonly configService: ConfigService,
-        private readonly persistenceHelper: ConfigPersistenceHelper
-    ) {
-        this.configModel = new ApiStateConfig<ApiConfig>(
-            {
-                name: 'api',
-                defaultConfig: createDefaultConfig(),
-                parse: (data) => data as ApiConfig,
-            },
-            this.persistenceHelper
-        );
-    }
-
-    async onModuleInit() {
-        try {
-            if (!(await fileExists(this.filePath))) {
-                this.migrateFromMyServersConfig();
-            }
-            await this.persistenceHelper.persistIfChanged(this.filePath, this.config);
-            this.configService.changes$.pipe(bufferTime(25)).subscribe({
-                next: async (changes) => {
-                    if (changes.some((change) => change.path.startsWith('api'))) {
-                        this.logger.verbose(`API Config changed ${JSON.stringify(changes)}`);
-                        try {
-                            await this.persistenceHelper.persistIfChanged(this.filePath, this.config);
-                        } catch (persistError) {
-                            this.logger.error('Error persisting config changes:', persistError);
-                        }
-                    }
-                },
-                error: (err) => {
-                    this.logger.error('Error receiving config changes:', err);
-                },
-            });
-        } catch (error) {
-            this.logger.error('Error during API config module initialization:', error);
-        }
+export class ApiConfigPersistence extends ConfigFilePersister<ApiConfig> {
+    constructor(configService: ConfigService) {
+        super(configService);
+    }
+
+    fileName(): string {
+        return 'api.json';
+    }
+
+    configKey(): string {
+        return 'api';
     }

+    /**
+     * @override
+     * Since the api config is read outside of the nestjs DI container,
+     * we need to provide an explicit path instead of relying on the
+     * default prefix from the configService.
+     *
+     * @returns The path to the api config file
+     */
+    configPath(): string {
+        return path.join(PATHS_CONFIG_MODULES, this.fileName());
+    }
+
+    defaultConfig(): ApiConfig {
+        return createDefaultConfig();
+    }
+
+    async migrateConfig(): Promise<ApiConfig> {
+        const legacyConfig = this.configService.get('store.config', {});
+        const migrated = this.convertLegacyConfig(legacyConfig);
+        return {
+            ...this.defaultConfig(),
+            ...migrated,
+        };
+    }
+
     convertLegacyConfig(
@@ -156,18 +103,11 @@ export class ApiConfigPersistence {
             ssoSubIds: csvStringToArray(config?.remote?.ssoSubIds ?? ''),
         };
     }

-    migrateFromMyServersConfig() {
-        const legacyConfig = this.configService.get('store.config', {});
-        const { sandbox, extraOrigins, ssoSubIds } = this.convertLegacyConfig(legacyConfig);
-        this.configService.set('api.sandbox', sandbox);
-        this.configService.set('api.extraOrigins', extraOrigins);
-        this.configService.set('api.ssoSubIds', ssoSubIds);
-    }
 }

 // apiConfig should be registered in root config in app.module.ts, not here.
 @Module({
-    providers: [ApiConfigPersistence, ConfigPersistenceHelper],
+    providers: [ApiConfigPersistence],
     exports: [ApiConfigPersistence],
 })
 export class ApiConfigModule {}
```
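Read together, the new class outlines the `ConfigFilePersister` contract from `@unraid/shared`: a subclass supplies a file name, a config key, an on-disk path, a default value, and a migration hook, while loading, change detection, and persistence are assumed to live in the base class (which is not part of this comparison). A hedged sketch of what another module-level config could look like under that contract, with a hypothetical config as the example:

```ts
import { Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';

import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';

// Hypothetical config shape, used only for illustration.
interface NotificationsConfig {
    enabled: boolean;
    retentionDays: number;
}

@Injectable()
export class NotificationsConfigPersistence extends ConfigFilePersister<NotificationsConfig> {
    constructor(configService: ConfigService) {
        super(configService);
    }

    // Only the overridden methods mirror what the diff shows for ApiConfigPersistence;
    // whether other overrides (configPath, migrateConfig) are required is assumed optional here.
    fileName(): string {
        return 'notifications.json';
    }

    configKey(): string {
        return 'notifications';
    }

    defaultConfig(): NotificationsConfig {
        return { enabled: true, retentionDays: 30 };
    }
}
```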
```diff
@@ -4,14 +4,12 @@ import { beforeEach, describe, expect, it, vi } from 'vitest';

 import { fileExists } from '@app/core/utils/files/file-exists.js';
 import { ApiConfigPersistence, loadApiConfig } from '@app/unraid-api/config/api-config.module.js';
-import { ConfigPersistenceHelper } from '@app/unraid-api/config/persistence.helper.js';

-// Mock the core file-exists utility used by ApiStateConfig
+// Mock file utilities
 vi.mock('@app/core/utils/files/file-exists.js', () => ({
     fileExists: vi.fn(),
 }));

-// Mock the shared file-exists utility used by ConfigPersistenceHelper
 vi.mock('@unraid/shared/util/file.js', () => ({
     fileExists: vi.fn(),
 }));
@@ -25,16 +23,56 @@ vi.mock('fs/promises', () => ({
 describe('ApiConfigPersistence', () => {
     let service: ApiConfigPersistence;
     let configService: ConfigService;
-    let persistenceHelper: ConfigPersistenceHelper;

     beforeEach(() => {
         configService = {
             get: vi.fn(),
             set: vi.fn(),
+            getOrThrow: vi.fn().mockReturnValue('test-config-path'),
         } as any;

-        persistenceHelper = {} as ConfigPersistenceHelper;
-        service = new ApiConfigPersistence(configService, persistenceHelper);
+        service = new ApiConfigPersistence(configService);
     });

+    describe('required ConfigFilePersister methods', () => {
+        it('should return correct file name', () => {
+            expect(service.fileName()).toBe('api.json');
+        });
+
+        it('should return correct config key', () => {
+            expect(service.configKey()).toBe('api');
+        });
+
+        it('should return default config', () => {
+            const defaultConfig = service.defaultConfig();
+            expect(defaultConfig).toEqual({
+                version: expect.any(String),
+                extraOrigins: [],
+                sandbox: false,
+                ssoSubIds: [],
+                plugins: [],
+            });
+        });
+
+        it('should migrate config from legacy format', async () => {
+            const mockLegacyConfig = {
+                local: { sandbox: 'yes' },
+                api: { extraOrigins: 'https://example.com,https://test.com' },
+                remote: { ssoSubIds: 'sub1,sub2' },
+            };
+
+            vi.mocked(configService.get).mockReturnValue(mockLegacyConfig);
+
+            const result = await service.migrateConfig();
+
+            expect(result).toEqual({
+                version: expect.any(String),
+                extraOrigins: ['https://example.com', 'https://test.com'],
+                sandbox: true,
+                ssoSubIds: ['sub1', 'sub2'],
+                plugins: [],
+            });
+        });
+    });
+
     describe('convertLegacyConfig', () => {
@@ -154,23 +192,11 @@ describe('ApiConfigPersistence', () => {
     });

 describe('loadApiConfig', () => {
-    let readFile: any;
-    let writeFile: any;
-
     beforeEach(async () => {
         vi.clearAllMocks();
-        // Reset modules to ensure fresh imports
-        vi.resetModules();
-
-        // Get mocked functions
-        const fsMocks = await import('fs/promises');
-        readFile = fsMocks.readFile;
-        writeFile = fsMocks.writeFile;
     });

-    it('should return default config when file does not exist', async () => {
-        vi.mocked(fileExists).mockResolvedValue(false);
-
+    it('should return default config with current API_VERSION', async () => {
         const result = await loadApiConfig();

         expect(result).toEqual({
@@ -182,39 +208,9 @@ describe('loadApiConfig', () => {
         });
     });

-    it('should merge disk config with defaults when file exists', async () => {
-        const diskConfig = {
-            extraOrigins: ['https://example.com'],
-            sandbox: true,
-            ssoSubIds: ['sub1', 'sub2'],
-        };
-
-        vi.mocked(fileExists).mockResolvedValue(true);
-        vi.mocked(readFile).mockResolvedValue(JSON.stringify(diskConfig));
-
+    it('should handle errors gracefully and return defaults', async () => {
         const result = await loadApiConfig();

         expect(result).toEqual({
             version: expect.any(String),
-            extraOrigins: ['https://example.com'],
-            sandbox: true,
-            ssoSubIds: ['sub1', 'sub2'],
+            extraOrigins: [],
             plugins: [],
         });
     });

-    it('should use default config and overwrite file when JSON parsing fails', async () => {
-        const { fileExists: sharedFileExists } = await import('@unraid/shared/util/file.js');
-
-        vi.mocked(fileExists).mockResolvedValue(true);
-        vi.mocked(readFile).mockResolvedValue('{ invalid json }');
-        vi.mocked(sharedFileExists).mockResolvedValue(false); // For persist operation
-        vi.mocked(writeFile).mockResolvedValue(undefined);
-
-        const result = await loadApiConfig();
-
-        // Error logging is handled by NestJS Logger, just verify the config is returned
-        expect(writeFile).toHaveBeenCalled();
-        expect(result).toEqual({
-            version: expect.any(String),
-            extraOrigins: [],
@@ -223,56 +219,4 @@ describe('loadApiConfig', () => {
             plugins: [],
         });
     });
-
-    it('should handle write failure gracefully when JSON parsing fails', async () => {
-        const { fileExists: sharedFileExists } = await import('@unraid/shared/util/file.js');
-
-        vi.mocked(fileExists).mockResolvedValue(true);
-        vi.mocked(readFile).mockResolvedValue('{ invalid json }');
-        vi.mocked(sharedFileExists).mockResolvedValue(false); // For persist operation
-        vi.mocked(writeFile).mockRejectedValue(new Error('Permission denied'));
-
-        const result = await loadApiConfig();
-
-        // Error logging is handled by NestJS Logger, just verify the config is returned
-        expect(writeFile).toHaveBeenCalled();
-        expect(result).toEqual({
-            version: expect.any(String),
-            extraOrigins: [],
-            sandbox: false,
-            ssoSubIds: [],
-            plugins: [],
-        });
-    });
-
-    it('should use default config when file is empty', async () => {
-        vi.mocked(fileExists).mockResolvedValue(true);
-        vi.mocked(readFile).mockResolvedValue('');
-
-        const result = await loadApiConfig();
-
-        // No error logging expected for empty files
-        expect(result).toEqual({
-            version: expect.any(String),
-            extraOrigins: [],
-            sandbox: false,
-            ssoSubIds: [],
-            plugins: [],
-        });
-    });
-
-    it('should always override version with current API_VERSION', async () => {
-        const diskConfig = {
-            version: 'old-version',
-            extraOrigins: ['https://example.com'],
-        };
-
-        vi.mocked(fileExists).mockResolvedValue(true);
-        vi.mocked(readFile).mockResolvedValue(JSON.stringify(diskConfig));
-
-        const result = await loadApiConfig();
-
-        expect(result.version).not.toBe('old-version');
-        expect(result.version).toBeTruthy();
-    });
 });
```
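The retained `convertLegacyConfig` tests pin down the legacy `myservers.cfg`-style migration: `sandbox: 'yes'`/`'no'` becomes a boolean, and the comma-separated `extraOrigins` and `ssoSubIds` strings become arrays, with non-HTTP(S) origins dropped. A standalone sketch consistent with those expectations follows; the repository's real implementation lives in `api-config.module.ts` and uses `csvStringToArray` from `@unraid/shared`, so treat this as illustrative only:

```ts
// Illustrative reimplementation, shaped purely by the test expectations above.
type LegacyConfig = {
    local?: { sandbox?: string };
    api?: { extraOrigins?: string };
    remote?: { ssoSubIds?: string };
};

// Split a comma-separated string into trimmed, non-empty entries.
const csvStringToArray = (csv: string): string[] =>
    csv
        .split(',')
        .map((item) => item.trim())
        .filter(Boolean);

export const convertLegacyConfig = (config: LegacyConfig) => ({
    sandbox: config?.local?.sandbox === 'yes',
    // Keep only origins that are actual http(s) URLs.
    extraOrigins: csvStringToArray(config?.api?.extraOrigins ?? '').filter((origin) =>
        /^https?:\/\//.test(origin)
    ),
    ssoSubIds: csvStringToArray(config?.remote?.ssoSubIds ?? ''),
});
```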
@@ -1,364 +0,0 @@
|
||||
import { Logger } from '@nestjs/common';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import { join } from 'path';
|
||||
|
||||
import type { Mock } from 'vitest';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { fileExists } from '@app/core/utils/files/file-exists.js';
|
||||
import { ApiStateConfig } from '@app/unraid-api/config/factory/api-state.model.js';
|
||||
import { ConfigPersistenceHelper } from '@app/unraid-api/config/persistence.helper.js';
|
||||
|
||||
vi.mock('node:fs/promises');
|
||||
vi.mock('@app/core/utils/files/file-exists.js');
|
||||
vi.mock('@app/environment.js', () => ({
|
||||
PATHS_CONFIG_MODULES: '/test/config/path',
|
||||
}));
|
||||
|
||||
describe('ApiStateConfig', () => {
|
||||
let mockPersistenceHelper: ConfigPersistenceHelper;
|
||||
let mockLogger: Logger;
|
||||
|
||||
interface TestConfig {
|
||||
name: string;
|
||||
value: number;
|
||||
enabled: boolean;
|
||||
}
|
||||
|
||||
const defaultConfig: TestConfig = {
|
||||
name: 'test',
|
||||
value: 42,
|
||||
enabled: true,
|
||||
};
|
||||
|
||||
const parseFunction = (data: unknown): TestConfig => {
|
||||
if (!data || typeof data !== 'object') {
|
||||
throw new Error('Invalid config format');
|
||||
}
|
||||
return data as TestConfig;
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
mockPersistenceHelper = {
|
||||
persistIfChanged: vi.fn().mockResolvedValue(true),
|
||||
} as any;
|
||||
|
||||
mockLogger = {
|
||||
log: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
} as any;
|
||||
|
||||
vi.spyOn(Logger.prototype, 'log').mockImplementation(mockLogger.log);
|
||||
vi.spyOn(Logger.prototype, 'warn').mockImplementation(mockLogger.warn);
|
||||
vi.spyOn(Logger.prototype, 'error').mockImplementation(mockLogger.error);
|
||||
vi.spyOn(Logger.prototype, 'debug').mockImplementation(mockLogger.debug);
|
||||
});
|
||||
|
||||
describe('constructor', () => {
|
||||
it('should initialize with cloned default config', () => {
|
||||
const config = new ApiStateConfig(
|
||||
{
|
||||
name: 'test-config',
|
||||
defaultConfig,
|
||||
parse: parseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
|
||||
expect(config.config).toEqual(defaultConfig);
|
||||
expect(config.config).not.toBe(defaultConfig);
|
||||
});
|
||||
});
|
||||
|
||||
describe('token', () => {
|
||||
it('should generate correct token', () => {
|
||||
const config = new ApiStateConfig(
|
||||
{
|
||||
name: 'my-config',
|
||||
defaultConfig,
|
||||
parse: parseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
|
||||
expect(config.token).toBe('ApiConfig.my-config');
|
||||
});
|
||||
});
|
||||
|
||||
describe('file paths', () => {
|
||||
it('should generate correct file name', () => {
|
||||
const config = new ApiStateConfig(
|
||||
{
|
||||
name: 'test-config',
|
||||
defaultConfig,
|
||||
parse: parseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
|
||||
expect(config.fileName).toBe('test-config.json');
|
||||
});
|
||||
|
||||
it('should generate correct file path', () => {
|
||||
const config = new ApiStateConfig(
|
||||
{
|
||||
name: 'test-config',
|
||||
defaultConfig,
|
||||
parse: parseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
|
||||
expect(config.filePath).toBe(join('/test/config/path', 'test-config.json'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseConfig', () => {
|
||||
let config: ApiStateConfig<TestConfig>;
|
||||
|
||||
beforeEach(() => {
|
||||
config = new ApiStateConfig(
|
||||
{
|
||||
name: 'test-config',
|
||||
defaultConfig,
|
||||
parse: parseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
});
|
||||
|
||||
it('should return undefined when file does not exist', async () => {
|
||||
(fileExists as Mock).mockResolvedValue(false);
|
||||
|
||||
const result = await config.parseConfig();
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
expect(readFile).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should parse valid JSON config', async () => {
|
||||
const validConfig = { name: 'custom', value: 100, enabled: false };
|
||||
(fileExists as Mock).mockResolvedValue(true);
|
||||
(readFile as Mock).mockResolvedValue(JSON.stringify(validConfig));
|
||||
|
||||
const result = await config.parseConfig();
|
||||
|
||||
expect(result).toEqual(validConfig);
|
||||
expect(readFile).toHaveBeenCalledWith(config.filePath, 'utf8');
|
||||
});
|
||||
|
||||
it('should return undefined for empty file', async () => {
|
||||
(fileExists as Mock).mockResolvedValue(true);
|
||||
(readFile as Mock).mockResolvedValue('');
|
||||
|
||||
const result = await config.parseConfig();
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(expect.stringContaining('is empty'));
|
||||
});
|
||||
|
||||
it('should return undefined for whitespace-only file', async () => {
|
||||
(fileExists as Mock).mockResolvedValue(true);
|
||||
(readFile as Mock).mockResolvedValue(' \n\t ');
|
||||
|
||||
const result = await config.parseConfig();
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(expect.stringContaining('is empty'));
|
||||
});
|
||||
|
||||
it('should throw error for invalid JSON', async () => {
|
||||
(fileExists as Mock).mockResolvedValue(true);
|
||||
(readFile as Mock).mockResolvedValue('{ invalid json }');
|
||||
|
||||
await expect(config.parseConfig()).rejects.toThrow();
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Failed to parse JSON')
|
||||
);
|
||||
expect(mockLogger.debug).toHaveBeenCalledWith(expect.stringContaining('{ invalid json }'));
|
||||
});
|
||||
|
||||
it('should throw error for incomplete JSON', async () => {
|
||||
(fileExists as Mock).mockResolvedValue(true);
|
||||
(readFile as Mock).mockResolvedValue('{ "name": "test"');
|
||||
|
||||
await expect(config.parseConfig()).rejects.toThrow();
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Failed to parse JSON')
|
||||
);
|
||||
});
|
||||
|
||||
it('should use custom file path when provided', async () => {
|
||||
const customPath = '/custom/path/config.json';
|
||||
(fileExists as Mock).mockResolvedValue(true);
|
||||
(readFile as Mock).mockResolvedValue(JSON.stringify(defaultConfig));
|
||||
|
||||
await config.parseConfig({ filePath: customPath });
|
||||
|
||||
expect(fileExists).toHaveBeenCalledWith(customPath);
|
||||
expect(readFile).toHaveBeenCalledWith(customPath, 'utf8');
|
||||
});
|
||||
});
|
||||
|
||||
describe('persist', () => {
|
||||
let config: ApiStateConfig<TestConfig>;
|
||||
|
||||
beforeEach(() => {
|
||||
config = new ApiStateConfig(
|
||||
{
|
||||
name: 'test-config',
|
||||
defaultConfig,
|
||||
parse: parseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
});
|
||||
|
||||
it('should persist current config when no argument provided', async () => {
|
||||
const result = await config.persist();
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockPersistenceHelper.persistIfChanged).toHaveBeenCalledWith(
|
||||
config.filePath,
|
||||
defaultConfig
|
||||
);
|
||||
});
|
||||
|
||||
it('should persist provided config', async () => {
|
||||
const customConfig = { name: 'custom', value: 999, enabled: false };
|
||||
|
||||
const result = await config.persist(customConfig);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockPersistenceHelper.persistIfChanged).toHaveBeenCalledWith(
|
||||
config.filePath,
|
||||
customConfig
|
||||
);
|
||||
});
|
||||
|
||||
it('should return false and log error on persistence failure', async () => {
|
||||
(mockPersistenceHelper.persistIfChanged as Mock).mockResolvedValue(false);
|
||||
|
||||
const result = await config.persist();
|
||||
|
||||
expect(result).toBe(false);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Could not write config')
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('load', () => {
|
||||
let config: ApiStateConfig<TestConfig>;
|
||||
|
||||
beforeEach(() => {
|
||||
config = new ApiStateConfig(
|
||||
{
|
||||
name: 'test-config',
|
||||
defaultConfig,
|
||||
parse: parseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
});
|
||||
|
||||
it('should load config from file when it exists', async () => {
|
||||
const savedConfig = { name: 'saved', value: 200, enabled: true };
|
||||
(fileExists as Mock).mockResolvedValue(true);
|
||||
(readFile as Mock).mockResolvedValue(JSON.stringify(savedConfig));
|
||||
|
||||
await config.load();
|
||||
|
||||
expect(config.config).toEqual(savedConfig);
|
||||
});
|
||||
|
||||
it('should create default config when file does not exist', async () => {
|
||||
(fileExists as Mock).mockResolvedValue(false);
|
||||
|
||||
await config.load();
|
||||
|
||||
expect(config.config).toEqual(defaultConfig);
|
||||
expect(mockLogger.log).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Config file does not exist')
|
||||
);
|
||||
expect(mockPersistenceHelper.persistIfChanged).toHaveBeenCalledWith(
|
||||
config.filePath,
|
||||
defaultConfig
|
||||
);
|
||||
});
|
||||
|
||||
it('should not modify config when file is invalid', async () => {
|
||||
(fileExists as Mock).mockResolvedValue(true);
|
||||
(readFile as Mock).mockResolvedValue('invalid json');
|
||||
|
||||
await config.load();
|
||||
|
||||
expect(config.config).toEqual(defaultConfig);
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
expect.any(Error),
|
||||
expect.stringContaining('is invalid')
|
||||
);
|
||||
});
|
||||
|
||||
it('should not throw even when persist fails', async () => {
|
||||
(fileExists as Mock).mockResolvedValue(false);
|
||||
(mockPersistenceHelper.persistIfChanged as Mock).mockResolvedValue(false);
|
||||
|
||||
await expect(config.load()).resolves.not.toThrow();
|
||||
|
||||
expect(config.config).toEqual(defaultConfig);
|
||||
});
|
||||
});
|
||||
|
||||
describe('update', () => {
|
||||
let config: ApiStateConfig<TestConfig>;
|
||||
|
||||
beforeEach(() => {
|
||||
config = new ApiStateConfig(
|
||||
{
|
||||
name: 'test-config',
|
||||
defaultConfig,
|
||||
parse: parseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
});
|
||||
|
||||
it('should update config with partial values', () => {
|
||||
config.update({ value: 123 });
|
||||
|
||||
expect(config.config).toEqual({
|
||||
name: 'test',
|
||||
value: 123,
|
||||
enabled: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('should return self for chaining', () => {
|
||||
const result = config.update({ enabled: false });
|
||||
|
||||
expect(result).toBe(config);
|
||||
});
|
||||
|
||||
it('should validate updated config through parse function', () => {
|
||||
const badParseFunction = vi.fn().mockImplementation(() => {
|
||||
throw new Error('Validation failed');
|
||||
});
|
||||
|
||||
const strictConfig = new ApiStateConfig(
|
||||
{
|
||||
name: 'strict-config',
|
||||
defaultConfig,
|
||||
parse: badParseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
|
||||
expect(() => strictConfig.update({ value: -1 })).toThrow('Validation failed');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,122 +0,0 @@
import { Logger } from '@nestjs/common';
import { readFile } from 'node:fs/promises';
import { join } from 'path';

import { fileExists } from '@app/core/utils/files/file-exists.js';
import { PATHS_CONFIG_MODULES } from '@app/environment.js';
import { makeConfigToken } from '@app/unraid-api/config/factory/config.injection.js';
import { ConfigPersistenceHelper } from '@app/unraid-api/config/persistence.helper.js';

export interface ApiStateConfigOptions<T> {
    /**
     * The name of the config.
     *
     * - Must be unique.
     * - Should be the key representing this config in the `ConfigFeatures` interface.
     * - Used for logging and dependency injection.
     */
    name: string;
    defaultConfig: T;
    parse: (data: unknown) => T;
}

export class ApiStateConfig<T> {
    #config: T;
    private logger: Logger;

    constructor(
        readonly options: ApiStateConfigOptions<T>,
        readonly persistenceHelper: ConfigPersistenceHelper
    ) {
        // Avoid sharing a reference with the given default config. This allows us to re-use it.
        this.#config = structuredClone(options.defaultConfig);
        this.logger = new Logger(this.token);
    }

    /** Unique token for this config. Used for Dependency Injection & logging. */
    get token() {
        return makeConfigToken(this.options.name);
    }

    get fileName() {
        return `${this.options.name}.json`;
    }

    get filePath() {
        return join(PATHS_CONFIG_MODULES, this.fileName);
    }

    get config() {
        return this.#config;
    }

    /**
     * Persists the config to the file system. Will never throw.
     * @param config - The config to persist.
     * @returns True if the config was written successfully, false otherwise.
     */
    async persist(config = this.#config) {
        const success = await this.persistenceHelper.persistIfChanged(this.filePath, config);
        if (!success) {
            this.logger.error(`Could not write config to ${this.filePath}.`);
        }
        return success;
    }

    /**
     * Reads the config from a path (defaults to the default file path of the config).
     * @param opts - The options for the read operation.
     * @param opts.filePath - The path to the config file.
     * @returns The parsed config, or undefined if the file does not exist or is empty.
     * @throws If the file exists but is invalid.
     */
    async parseConfig(opts: { filePath?: string } = {}): Promise<T | undefined> {
        const { filePath = this.filePath } = opts;
        if (!(await fileExists(filePath))) return undefined;

        const fileContent = await readFile(filePath, 'utf8');

        if (!fileContent || fileContent.trim() === '') {
            this.logger.warn(`Config file '${filePath}' is empty.`);
            return undefined;
        }

        try {
            const rawConfig = JSON.parse(fileContent);
            return this.options.parse(rawConfig);
        } catch (error) {
            this.logger.error(
                `Failed to parse JSON from '${filePath}': ${error instanceof Error ? error.message : String(error)}`
            );
            this.logger.debug(`File content: ${fileContent.substring(0, 100)}...`);
            throw error;
        }
    }

    /**
     * Loads config from the file system. If the file does not exist, it will be created with the default config.
     * If the config is invalid or corrupt, no action will be taken. The error will be logged.
     *
     * Will never throw.
     */
    async load() {
        try {
            const config = await this.parseConfig();
            if (config) {
                this.#config = config;
            } else {
                this.logger.log(`Config file does not exist. Writing default config.`);
                this.#config = this.options.defaultConfig;
                await this.persist();
            }
        } catch (error) {
            this.logger.warn(error, `Config file '${this.filePath}' is invalid. Not modifying config.`);
        }
    }

    update(config: Partial<T>) {
        const proposedConfig = this.options.parse({ ...this.#config, ...config });
        this.#config = proposedConfig;
        return this;
    }
}
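For context, a minimal sketch of how the removed ApiStateConfig class above is meant to be consumed. The 'sandbox' config name, the SandboxConfig type, and the parseSandboxConfig validator are illustrative assumptions, not values taken from this compare; real configs would typically validate with a schema library.

import { ApiStateConfig } from '@app/unraid-api/config/factory/api-state.model.js';
import { ConfigPersistenceHelper } from '@app/unraid-api/config/persistence.helper.js';

// Illustrative config shape and validator.
interface SandboxConfig {
    enabled: boolean;
    port: number;
}

const parseSandboxConfig = (data: unknown): SandboxConfig => {
    const candidate = (data ?? {}) as Partial<SandboxConfig>;
    if (typeof candidate.port !== 'number') throw new Error('port must be a number');
    return { enabled: Boolean(candidate.enabled), port: candidate.port };
};

const sandbox = new ApiStateConfig(
    { name: 'sandbox', defaultConfig: { enabled: false, port: 3001 }, parse: parseSandboxConfig },
    new ConfigPersistenceHelper()
);

await sandbox.load(); // reads <PATHS_CONFIG_MODULES>/sandbox.json, or writes the default config
sandbox.update({ enabled: true }); // validated, in-memory only
await sandbox.persist(); // writes only if the staged JSON differs from what is on disk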
@@ -1,54 +0,0 @@
import type { DynamicModule, Provider } from '@nestjs/common';
import { SchedulerRegistry } from '@nestjs/schedule';

import type { ApiStateConfigOptions } from '@app/unraid-api/config/factory/api-state.model.js';
import type { ApiStateConfigPersistenceOptions } from '@app/unraid-api/config/factory/api-state.service.js';
import { ApiStateConfig } from '@app/unraid-api/config/factory/api-state.model.js';
import { ScheduledConfigPersistence } from '@app/unraid-api/config/factory/api-state.service.js';
import { makeConfigToken } from '@app/unraid-api/config/factory/config.injection.js';
import { ConfigPersistenceHelper } from '@app/unraid-api/config/persistence.helper.js';

type ApiStateRegisterOptions<ConfigType> = ApiStateConfigOptions<ConfigType> & {
    persistence?: ApiStateConfigPersistenceOptions;
};

export class ApiStateConfigModule {
    static async register<ConfigType>(
        options: ApiStateRegisterOptions<ConfigType>
    ): Promise<DynamicModule> {
        const { persistence, ...configOptions } = options;
        const configToken = makeConfigToken(options.name);
        const persistenceToken = makeConfigToken(options.name, ScheduledConfigPersistence.name);
        const ConfigProvider = {
            provide: configToken,
            useFactory: async (helper: ConfigPersistenceHelper) => {
                const config = new ApiStateConfig(configOptions, helper);
                await config.load();
                return config;
            },
            inject: [ConfigPersistenceHelper],
        };

        const providers: Provider[] = [ConfigProvider, ConfigPersistenceHelper];
        const exports = [configToken];
        if (persistence) {
            providers.push({
                provide: persistenceToken,
                useFactory: (
                    schedulerRegistry: SchedulerRegistry,
                    config: ApiStateConfig<ConfigType>
                ) => {
                    return new ScheduledConfigPersistence(schedulerRegistry, config, persistence);
                },
                inject: [SchedulerRegistry, configToken],
            });
            exports.push(persistenceToken);
        }

        return {
            module: ApiStateConfigModule,
            providers,
            exports,
        };
    }
}
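A sketch of registering the dynamic module above, reusing the hypothetical 'sandbox' config from the previous sketch; SandboxModule and the parse function are illustrative assumptions, not part of this compare.

import { Module } from '@nestjs/common';

import { ApiStateConfigModule } from '@app/unraid-api/config/factory/api-state.module.js';

@Module({
    imports: [
        // register() returns a Promise<DynamicModule>, which Nest accepts in `imports`.
        ApiStateConfigModule.register({
            name: 'sandbox',
            defaultConfig: { enabled: false, port: 3001 },
            parse: parseSandboxConfig, // the illustrative validator from the sketch above
            // Omit `persistence` to skip the ScheduledConfigPersistence provider entirely.
            persistence: { intervalMs: 30_000, maxConsecutiveFailures: 3 },
        }),
    ],
})
export class SandboxModule {}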
@@ -1,82 +0,0 @@
import type { OnModuleDestroy, OnModuleInit } from '@nestjs/common';
import { Logger } from '@nestjs/common';
import { SchedulerRegistry } from '@nestjs/schedule';

import type { ApiStateConfig } from '@app/unraid-api/config/factory/api-state.model.js';
import { makeConfigToken } from '@app/unraid-api/config/factory/config.injection.js';

export interface ApiStateConfigPersistenceOptions {
    /** How often to persist the config to the file system, in milliseconds. Defaults to one minute. */
    intervalMs?: number;
    /** How many consecutive failed persistence attempts to tolerate before stopping. Defaults to 3. */
    maxConsecutiveFailures?: number;
    /**
     * By default, the config will be persisted to the file system when the module is initialized and destroyed.
     * Set this to true to disable this behavior.
     */
    disableLifecycleHooks?: boolean;
}

export class ScheduledConfigPersistence<T> implements OnModuleInit, OnModuleDestroy {
    private consecutiveFailures = 0;
    private logger: Logger;

    constructor(
        private readonly schedulerRegistry: SchedulerRegistry,
        private readonly config: ApiStateConfig<T>,
        private readonly options: ApiStateConfigPersistenceOptions
    ) {
        this.logger = new Logger(this.token);
    }

    get token() {
        return makeConfigToken(this.configName, ScheduledConfigPersistence.name);
    }

    get configName() {
        return this.config.options.name;
    }

    onModuleInit() {
        if (this.options.disableLifecycleHooks) return;
        this.setup();
    }

    async onModuleDestroy() {
        if (this.options.disableLifecycleHooks) return;
        this.stop();
        await this.config.persist();
    }

    stop() {
        if (this.schedulerRegistry.getInterval(this.token)) {
            this.schedulerRegistry.deleteInterval(this.token);
        }
    }

    setup() {
        const interval = this.schedulerRegistry.getInterval(this.token);
        if (interval) {
            this.logger.warn(`Persistence interval for '${this.token}' already exists. Aborting setup.`);
            return;
        }
        const ONE_MINUTE = 60_000;
        const { intervalMs = ONE_MINUTE, maxConsecutiveFailures = 3 } = this.options;

        const callback = async () => {
            const success = await this.config.persist();
            if (success) {
                this.consecutiveFailures = 0;
                return;
            }
            this.consecutiveFailures++;
            if (this.consecutiveFailures > maxConsecutiveFailures) {
                this.logger.warn(
                    `Failed to persist '${this.configName}' too many times in a row (${this.consecutiveFailures} attempts). Disabling persistence.`
                );
                this.schedulerRegistry.deleteInterval(this.token);
            }
        };

        this.schedulerRegistry.addInterval(this.token, setInterval(callback, intervalMs));
    }
}
@@ -1,22 +0,0 @@
import { Inject } from '@nestjs/common';

import type { ConfigFeatures } from '@app/unraid-api/config/factory/config.interface.js';

/**
 * Creates a string token representation of the arguments. Pure function.
 *
 * @param configName - The name of the config.
 * @returns A dot-separated string.
 */
export function makeConfigToken(configName: string, ...details: string[]) {
    return ['ApiConfig', configName, ...details].join('.');
}

/**
 * Custom decorator to inject a config by name.
 * @param feature - The name of the config to inject.
 * @returns Dependency injector for the config.
 */
export function InjectConfig<K extends keyof ConfigFeatures>(feature: K) {
    return Inject(makeConfigToken(feature));
}
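A sketch of consuming a registered config through the InjectConfig decorator above. It assumes ConfigFeatures has been augmented with an illustrative 'sandbox' key (see the next file) and reuses the illustrative SandboxConfig type; SandboxService is not part of this compare.

import { Injectable } from '@nestjs/common';

import type { ApiStateConfig } from '@app/unraid-api/config/factory/api-state.model.js';
import { InjectConfig } from '@app/unraid-api/config/factory/config.injection.js';

@Injectable()
export class SandboxService {
    constructor(
        // Resolves the provider bound to makeConfigToken('sandbox'), i.e. the token 'ApiConfig.sandbox'.
        @InjectConfig('sandbox') private readonly sandbox: ApiStateConfig<SandboxConfig>
    ) {}

    isEnabled(): boolean {
        return this.sandbox.config.enabled;
    }
}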
@@ -1,15 +0,0 @@
/**
 * Container record of config names to their types. Used for type completion on registered configs.
 * Config authors should redeclare/merge this interface with their config names as the keys
 * and implementation models as the types.
 */
export interface ConfigFeatures {}

export interface ConfigMetadata<T = unknown> {
    /** Unique token for this config. Used for Dependency Injection, logging, etc. */
    token: string;
    /** The path to the config file. */
    filePath?: string;
    /** Validates a config of type `T`. */
    validate: (config: unknown) => Promise<T>;
}
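A sketch of the declaration merging the ConfigFeatures comment above asks config authors to do, using the illustrative 'sandbox' key; the augmented module path simply mirrors the import path used elsewhere in this compare and is an assumption.

import type { ApiStateConfig } from '@app/unraid-api/config/factory/api-state.model.js';

// Illustrative config shape; stands in for whatever model a config author actually registers.
interface SandboxConfig {
    enabled: boolean;
    port: number;
}

declare module '@app/unraid-api/config/factory/config.interface.js' {
    interface ConfigFeatures {
        // Key must match the `name` passed to ApiStateConfigModule.register(...).
        sandbox: ApiStateConfig<SandboxConfig>;
    }
}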
@@ -1,70 +0,0 @@
import { Injectable } from '@nestjs/common';
import { readFile, writeFile } from 'fs/promises';

import { fileExists } from '@unraid/shared/util/file.js';
import { isEqual } from 'lodash-es';

@Injectable()
export class ConfigPersistenceHelper {
    /**
     * Persist the config to disk if the given data is different from the data on-disk.
     * This helps preserve the boot flash drive's life by avoiding unnecessary writes.
     *
     * @param filePath - The path to the config file.
     * @param data - The data to persist.
     * @returns `true` if the config was persisted, `false` if no changes were needed or if persistence failed.
     *
     * This method is designed to never throw errors. If the existing file is corrupted or unreadable,
     * it will attempt to overwrite it with the new data. If write operations fail, it returns false
     * but does not crash the application.
     */
    async persistIfChanged(filePath: string, data: unknown): Promise<boolean> {
        if (!(await fileExists(filePath))) {
            try {
                const jsonString = JSON.stringify(data ?? {}, null, 2);
                await writeFile(filePath, jsonString);
                return true;
            } catch (error) {
                // JSON serialization or write failed, but don't crash - just return false
                return false;
            }
        }

        let currentData: unknown;
        try {
            const fileContent = await readFile(filePath, 'utf8');
            currentData = JSON.parse(fileContent);
        } catch (error) {
            // If existing file is corrupted, treat it as if it doesn't exist
            // and write the new data
            try {
                const jsonString = JSON.stringify(data ?? {}, null, 2);
                await writeFile(filePath, jsonString);
                return true;
            } catch (writeError) {
                // JSON serialization or write failed, but don't crash - just return false
                return false;
            }
        }

        let stagedData: unknown;
        try {
            stagedData = JSON.parse(JSON.stringify(data));
        } catch (error) {
            // If data can't be serialized to JSON, we can't persist it
            return false;
        }

        if (isEqual(currentData, stagedData)) {
            return false;
        }

        try {
            await writeFile(filePath, JSON.stringify(stagedData, null, 2));
            return true;
        } catch (error) {
            // Write failed, but don't crash - just return false
            return false;
        }
    }
}
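A short sketch of the helper's return semantics; the path and payload below are placeholders, not values from this compare. Note that a false return conflates "no change needed" with "write failed", so callers that must distinguish the two need to check the file themselves.

import { ConfigPersistenceHelper } from '@app/unraid-api/config/persistence.helper.js';

const helper = new ConfigPersistenceHelper();

const wrote = await helper.persistIfChanged('/tmp/sandbox.json', { enabled: true, port: 3001 });
if (!wrote) {
    // Either the on-disk JSON already matched, or the serialization/write failed.
    console.log('sandbox.json left untouched');
}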
@@ -1,6 +1,6 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { Cron } from '@nestjs/schedule';
|
||||
import { existsSync } from 'fs';
|
||||
import { stat } from 'fs/promises';
|
||||
|
||||
import { execa } from 'execa';
|
||||
|
||||
@@ -8,25 +8,26 @@ import { execa } from 'execa';
|
||||
export class LogRotateService {
|
||||
private readonly logger = new Logger(LogRotateService.name);
|
||||
|
||||
logRotatePath: string = '/usr/sbin/logrotate';
|
||||
configPath: string = '/etc/logrotate.conf';
|
||||
@Cron('0 * * * *')
|
||||
private readonly logFilePath = '/var/log/graphql-api.log';
|
||||
private readonly maxSizeBytes = 5 * 1024 * 1024; // 5MB
|
||||
|
||||
@Cron('*/20 * * * *') // Every 20 minutes
|
||||
async handleCron() {
|
||||
try {
|
||||
if (!existsSync(this.logRotatePath)) {
|
||||
throw new Error(`Logrotate binary not found at ${this.logRotatePath}`);
|
||||
const stats = await stat(this.logFilePath);
|
||||
if (stats.size > this.maxSizeBytes) {
|
||||
this.logger.debug(`Log file size (${stats.size} bytes) exceeds limit, truncating`);
|
||||
await execa('truncate', ['-s', '0', this.logFilePath]);
|
||||
this.logger.debug('Log file truncated successfully');
|
||||
} else {
|
||||
this.logger.debug(`Log file size (${stats.size} bytes) within limit`);
|
||||
}
|
||||
if (!existsSync(this.configPath)) {
|
||||
throw new Error(`Logrotate config not found at ${this.configPath}`);
|
||||
}
|
||||
this.logger.debug('Running logrotate');
|
||||
const result = await execa(this.logRotatePath, [this.configPath]);
|
||||
if (result.failed) {
|
||||
throw new Error(`Logrotate execution failed: ${result.stderr}`);
|
||||
}
|
||||
this.logger.debug('Logrotate completed successfully');
|
||||
} catch (error) {
|
||||
this.logger.debug('Failed to run logrotate with error' + error);
|
||||
if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
|
||||
this.logger.debug('Log file does not exist, skipping truncation');
|
||||
} else {
|
||||
this.logger.debug('Failed to check/truncate log file: ' + error);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { existsSync } from 'node:fs';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import { join } from 'node:path';
|
||||
|
||||
import { type DynamixConfig } from '@app/core/types/ini.js';
|
||||
import { toBoolean } from '@app/core/utils/casting.js';
|
||||
import { fileExists } from '@app/core/utils/files/file-exists.js';
|
||||
import { loadState } from '@app/core/utils/misc/load-state.js';
|
||||
import { getters } from '@app/store/index.js';
|
||||
import { ThemeName } from '@app/unraid-api/graph/resolvers/customization/theme.model.js';
|
||||
@@ -80,7 +80,7 @@ export class DisplayService {
|
||||
|
||||
// If the config file doesn't exist then it's a new OS install
|
||||
// Default to "default"
|
||||
if (!existsSync(configFilePath)) {
|
||||
if (!(await fileExists(configFilePath))) {
|
||||
return states.default;
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,228 @@
|
||||
// Unit Test File for NotificationsService: loadNotificationFile
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { NotificationIni } from '@app/core/types/states/notification.js';
|
||||
import {
|
||||
Notification,
|
||||
NotificationImportance,
|
||||
NotificationType,
|
||||
} from '@app/unraid-api/graph/resolvers/notifications/notifications.model.js';
|
||||
import { NotificationsService } from '@app/unraid-api/graph/resolvers/notifications/notifications.service.js';
|
||||
|
||||
// Only mock getters.dynamix and Logger
|
||||
vi.mock('@app/store/index.js', () => ({
|
||||
getters: {
|
||||
dynamix: vi.fn().mockReturnValue({
|
||||
notify: { path: '/test/notifications' },
|
||||
display: {
|
||||
date: 'Y-m-d',
|
||||
time: 'H:i:s',
|
||||
},
|
||||
}),
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('@nestjs/common', async (importOriginal) => {
|
||||
const original = await importOriginal<typeof import('@nestjs/common')>();
|
||||
return {
|
||||
...original,
|
||||
Logger: vi.fn(() => ({
|
||||
log: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
verbose: vi.fn(),
|
||||
})),
|
||||
};
|
||||
});
|
||||
|
||||
describe('NotificationsService - loadNotificationFile (minimal mocks)', () => {
|
||||
let service: NotificationsService;
|
||||
|
||||
beforeEach(() => {
|
||||
service = new NotificationsService();
|
||||
});
|
||||
|
||||
it('should load and validate a valid notification file', async () => {
|
||||
const mockNotificationIni: NotificationIni = {
|
||||
timestamp: '1609459200',
|
||||
event: 'Test Event',
|
||||
subject: 'Test Subject',
|
||||
description: 'Test Description',
|
||||
importance: 'alert',
|
||||
link: 'http://example.com',
|
||||
};
|
||||
|
||||
vi.spyOn(await import('@app/core/utils/misc/parse-config.js'), 'parseConfig').mockReturnValue(
|
||||
mockNotificationIni
|
||||
);
|
||||
|
||||
const result = await (service as any).loadNotificationFile(
|
||||
'/test/path/test.notify',
|
||||
NotificationType.UNREAD
|
||||
);
|
||||
expect(result).toEqual(
|
||||
expect.objectContaining({
|
||||
id: 'test.notify',
|
||||
type: NotificationType.UNREAD,
|
||||
title: 'Test Event',
|
||||
subject: 'Test Subject',
|
||||
description: 'Test Description',
|
||||
importance: NotificationImportance.ALERT,
|
||||
link: 'http://example.com',
|
||||
timestamp: '2021-01-01T00:00:00.000Z',
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('should return masked warning notification on validation error (missing required fields)', async () => {
|
||||
const invalidNotificationIni: Omit<NotificationIni, 'event'> = {
|
||||
timestamp: '1609459200',
|
||||
// event: 'Missing Event', // missing required field
|
||||
subject: 'Test Subject',
|
||||
description: 'Test Description',
|
||||
importance: 'alert',
|
||||
};
|
||||
|
||||
vi.spyOn(await import('@app/core/utils/misc/parse-config.js'), 'parseConfig').mockReturnValue(
|
||||
invalidNotificationIni
|
||||
);
|
||||
|
||||
const result = await (service as any).loadNotificationFile(
|
||||
'/test/path/invalid.notify',
|
||||
NotificationType.UNREAD
|
||||
);
|
||||
expect(result.id).toBe('invalid.notify');
|
||||
expect(result.importance).toBe(NotificationImportance.WARNING);
|
||||
expect(result.description).toContain('invalid and cannot be displayed');
|
||||
});
|
||||
|
||||
it('should handle invalid enum values', async () => {
|
||||
const invalidNotificationIni: NotificationIni = {
|
||||
timestamp: '1609459200',
|
||||
event: 'Test Event',
|
||||
subject: 'Test Subject',
|
||||
description: 'Test Description',
|
||||
importance: 'not-a-valid-enum' as any,
|
||||
};
|
||||
|
||||
vi.spyOn(await import('@app/core/utils/misc/parse-config.js'), 'parseConfig').mockReturnValue(
|
||||
invalidNotificationIni
|
||||
);
|
||||
|
||||
const result = await (service as any).loadNotificationFile(
|
||||
'/test/path/invalid-enum.notify',
|
||||
NotificationType.UNREAD
|
||||
);
|
||||
expect(result.id).toBe('invalid-enum.notify');
|
||||
// Implementation falls back to INFO for unknown importance
|
||||
expect(result.importance).toBe(NotificationImportance.INFO);
|
||||
// Should not be a masked warning notification, just fallback to INFO
|
||||
expect(result.description).toBe('Test Description');
|
||||
});
|
||||
|
||||
it('should handle missing description field (should return masked warning notification)', async () => {
|
||||
const mockNotificationIni: Omit<NotificationIni, 'description'> = {
|
||||
timestamp: '1609459200',
|
||||
event: 'Test Event',
|
||||
subject: 'Test Subject',
|
||||
importance: 'normal',
|
||||
};
|
||||
|
||||
vi.spyOn(await import('@app/core/utils/misc/parse-config.js'), 'parseConfig').mockReturnValue(
|
||||
mockNotificationIni
|
||||
);
|
||||
|
||||
const result = await (service as any).loadNotificationFile(
|
||||
'/test/path/test.notify',
|
||||
NotificationType.UNREAD
|
||||
);
|
||||
// Should be a masked warning notification
|
||||
expect(result.description).toContain('invalid and cannot be displayed');
|
||||
expect(result.importance).toBe(NotificationImportance.WARNING);
|
||||
});
|
||||
|
||||
it('should preserve passthrough data from notification file (only known fields)', async () => {
|
||||
const mockNotificationIni: NotificationIni & { customField: string } = {
|
||||
timestamp: '1609459200',
|
||||
event: 'Test Event',
|
||||
subject: 'Test Subject',
|
||||
description: 'Test Description',
|
||||
importance: 'normal',
|
||||
link: 'http://example.com',
|
||||
customField: 'custom value',
|
||||
};
|
||||
|
||||
vi.spyOn(await import('@app/core/utils/misc/parse-config.js'), 'parseConfig').mockReturnValue(
|
||||
mockNotificationIni
|
||||
);
|
||||
|
||||
const result = await (service as any).loadNotificationFile(
|
||||
'/test/path/test.notify',
|
||||
NotificationType.UNREAD
|
||||
);
|
||||
expect(result).toEqual(
|
||||
expect.objectContaining({
|
||||
link: 'http://example.com',
|
||||
// customField should NOT be present
|
||||
description: 'Test Description',
|
||||
id: 'test.notify',
|
||||
type: NotificationType.UNREAD,
|
||||
title: 'Test Event',
|
||||
subject: 'Test Subject',
|
||||
importance: NotificationImportance.INFO,
|
||||
timestamp: '2021-01-01T00:00:00.000Z',
|
||||
})
|
||||
);
|
||||
expect((result as any).customField).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle missing timestamp field gracefully', async () => {
|
||||
const mockNotificationIni: Omit<NotificationIni, 'timestamp'> = {
|
||||
// timestamp is missing
|
||||
event: 'Test Event',
|
||||
subject: 'Test Subject',
|
||||
description: 'Test Description',
|
||||
importance: 'alert',
|
||||
};
|
||||
|
||||
vi.spyOn(await import('@app/core/utils/misc/parse-config.js'), 'parseConfig').mockReturnValue(
|
||||
mockNotificationIni
|
||||
);
|
||||
|
||||
const result = await (service as any).loadNotificationFile(
|
||||
'/test/path/missing-timestamp.notify',
|
||||
NotificationType.UNREAD
|
||||
);
|
||||
expect(result.id).toBe('missing-timestamp.notify');
|
||||
expect(result.importance).toBe(NotificationImportance.ALERT);
|
||||
expect(result.description).toBe('Test Description');
|
||||
expect(result.timestamp).toBeUndefined(); // Missing timestamp results in undefined
|
||||
expect(result.formattedTimestamp).toBe(undefined); // Also undefined since timestamp is missing
|
||||
});
|
||||
|
||||
it('should handle malformed timestamp field gracefully', async () => {
|
||||
const mockNotificationIni: NotificationIni = {
|
||||
timestamp: 'not-a-timestamp',
|
||||
event: 'Test Event',
|
||||
subject: 'Test Subject',
|
||||
description: 'Test Description',
|
||||
importance: 'alert',
|
||||
};
|
||||
|
||||
vi.spyOn(await import('@app/core/utils/misc/parse-config.js'), 'parseConfig').mockReturnValue(
|
||||
mockNotificationIni
|
||||
);
|
||||
|
||||
const result = await (service as any).loadNotificationFile(
|
||||
'/test/path/malformed-timestamp.notify',
|
||||
NotificationType.UNREAD
|
||||
);
|
||||
expect(result.id).toBe('malformed-timestamp.notify');
|
||||
expect(result.importance).toBe(NotificationImportance.ALERT);
|
||||
expect(result.description).toBe('Test Description');
|
||||
expect(result.timestamp).toBeUndefined(); // Malformed timestamp results in undefined
|
||||
expect(result.formattedTimestamp).toBe('not-a-timestamp'); // Returns original string when parsing fails
|
||||
});
|
||||
});
|
||||
@@ -1,3 +1,11 @@
|
||||
// Integration Test File for NotificationsService
|
||||
// ------------------------------------------------
|
||||
// This file contains integration-style tests for the NotificationsService.
|
||||
// It uses the full NestJS TestingModule, mocks only the minimum required dependencies,
|
||||
// and interacts with the real filesystem (in /tmp/test/notifications).
|
||||
// These tests cover end-to-end service behavior, including notification creation,
|
||||
// archiving, unarchiving, deletion, and legacy CLI compatibility.
|
||||
|
||||
import type { TestingModule } from '@nestjs/testing';
|
||||
import { Test } from '@nestjs/testing';
|
||||
import { existsSync } from 'fs';
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { statSync } from 'fs';
|
||||
import { readdir, rename, unlink, writeFile } from 'fs/promises';
|
||||
import { readdir, rename, stat, unlink, writeFile } from 'fs/promises';
|
||||
import { basename, join } from 'path';
|
||||
|
||||
import type { Stats } from 'fs';
|
||||
import { FSWatcher, watch } from 'chokidar';
|
||||
import { ValidationError } from 'class-validator';
|
||||
import { execa } from 'execa';
|
||||
import { emptyDir } from 'fs-extra';
|
||||
import { encode as encodeIni } from 'ini';
|
||||
@@ -581,12 +581,15 @@ export class NotificationsService {
|
||||
sortFn: SortFn<Stats> = (fileA, fileB) => fileB.birthtimeMs - fileA.birthtimeMs // latest first
|
||||
): Promise<string[]> {
|
||||
const contents = narrowContent(await readdir(folderPath));
|
||||
return contents
|
||||
.map((content) => {
|
||||
const contentStats = await Promise.all(
|
||||
contents.map(async (content) => {
|
||||
// pre-map each file's stats to avoid excess calls during sorting
|
||||
const path = join(folderPath, content);
|
||||
return { path, stats: statSync(path) };
|
||||
const stats = await stat(path);
|
||||
return { path, stats };
|
||||
})
|
||||
);
|
||||
return contentStats
|
||||
.sort((fileA, fileB) => sortFn(fileA.stats, fileB.stats))
|
||||
.map(({ path }) => path);
|
||||
}
|
||||
@@ -635,10 +638,14 @@ export class NotificationsService {
|
||||
* Loads a notification file from disk, parses it to a Notification object, and
|
||||
* validates the object against the NotificationSchema.
|
||||
*
|
||||
* If the file contains invalid data (doesn't conform to the Notification schema),
|
||||
* instead of throwing, returns a masked warning notification with details masked,
|
||||
* and logs a warning. This allows the system to gracefully handle corrupt or malformed notifications.
|
||||
*
|
||||
* @param path The path to the notification file on disk.
|
||||
* @param type The type of the notification that is being loaded.
|
||||
* @returns A parsed Notification object, or throws an error if the object is invalid.
|
||||
* @throws An error if the object is invalid (doesn't conform to the graphql NotificationSchema).
|
||||
* @returns A parsed Notification object, or a masked warning notification if invalid.
|
||||
* @throws File system errors (file not found, permission issues) or unexpected validation errors.
|
||||
*/
|
||||
private async loadNotificationFile(path: string, type: NotificationType): Promise<Notification> {
|
||||
const notificationFile = parseConfig<NotificationIni>({
|
||||
@@ -656,8 +663,28 @@ export class NotificationsService {
|
||||
// The contents of the file, and therefore the notification, may not always be a valid notification.
|
||||
// so we parse it through the schema to make sure it is
|
||||
|
||||
const validatedNotification = await validateObject(Notification, notification);
|
||||
return validatedNotification;
|
||||
try {
|
||||
const validatedNotification = await validateObject(Notification, notification);
|
||||
return validatedNotification;
|
||||
} catch (error) {
|
||||
if (!(error instanceof ValidationError)) {
|
||||
throw error;
|
||||
}
|
||||
const errorsToLog = error.children?.length ? error.children : error;
|
||||
this.logger.warn(errorsToLog, `notification file at ${path} is invalid. Will mask.`);
|
||||
const nameMask = this.getIdFromPath(path);
|
||||
const dateMask = new Date();
|
||||
return {
|
||||
id: nameMask,
|
||||
type,
|
||||
title: nameMask,
|
||||
subject: nameMask,
|
||||
description: `This notification is invalid and cannot be displayed! For details, see the logs and the notification file at ${path}`,
|
||||
importance: NotificationImportance.WARNING,
|
||||
timestamp: dateMask.toISOString(),
|
||||
formattedTimestamp: this.formatDatetime(dateMask),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
private getIdFromPath(path: string) {
|
||||
@@ -729,19 +756,22 @@ export class NotificationsService {
|
||||
}
|
||||
|
||||
private formatTimestamp(timestamp: string) {
|
||||
const { display: settings } = getters.dynamix();
|
||||
const date = this.parseNotificationDateToIsoDate(timestamp);
|
||||
if (!date) {
|
||||
this.logger.warn(`[formatTimestamp] Could not parse date from timestamp: ${timestamp}`);
|
||||
return timestamp;
|
||||
}
|
||||
return this.formatDatetime(date);
|
||||
}
|
||||
|
||||
private formatDatetime(date: Date) {
|
||||
const { display: settings } = getters.dynamix();
|
||||
if (!settings) {
|
||||
this.logger.warn(
|
||||
'[formatTimestamp] Dynamix display settings not found. Cannot apply user settings.'
|
||||
);
|
||||
return timestamp;
|
||||
} else if (!date) {
|
||||
this.logger.warn(`[formatTimestamp] Could not parse date from timestamp: ${date}`);
|
||||
return timestamp;
|
||||
return date.toISOString();
|
||||
}
|
||||
// this.logger.debug(`[formatTimestamp] ${settings.date} :: ${settings.time} :: ${date}`);
|
||||
return formatDatetime(date, {
|
||||
dateFormat: settings.date,
|
||||
timeFormat: settings.time,
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { Injectable, Logger, OnModuleDestroy, OnModuleInit } from '@nestjs/common';
|
||||
import crypto from 'crypto';
|
||||
import { ChildProcess } from 'node:child_process';
|
||||
import { existsSync } from 'node:fs';
|
||||
import { mkdir, rm, writeFile } from 'node:fs/promises';
|
||||
import { dirname, join } from 'node:path';
|
||||
|
||||
@@ -10,6 +9,7 @@ import got, { HTTPError } from 'got';
|
||||
import pRetry from 'p-retry';
|
||||
|
||||
import { sanitizeParams } from '@app/core/log.js';
|
||||
import { fileExists } from '@app/core/utils/files/file-exists.js';
|
||||
import {
|
||||
CreateRCloneRemoteDto,
|
||||
DeleteRCloneRemoteDto,
|
||||
@@ -104,7 +104,7 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
|
||||
private async startRcloneSocket(socketPath: string, logFilePath: string): Promise<boolean> {
|
||||
try {
|
||||
// Make sure the log file exists
|
||||
if (!existsSync(logFilePath)) {
|
||||
if (!(await fileExists(logFilePath))) {
|
||||
this.logger.debug(`Creating log file: ${logFilePath}`);
|
||||
await mkdir(dirname(logFilePath), { recursive: true });
|
||||
await writeFile(logFilePath, '', 'utf-8');
|
||||
@@ -187,7 +187,7 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
|
||||
}
|
||||
|
||||
// Clean up the socket file if it exists
|
||||
if (this.rcloneSocketPath && existsSync(this.rcloneSocketPath)) {
|
||||
if (this.rcloneSocketPath && (await fileExists(this.rcloneSocketPath))) {
|
||||
this.logger.log(`Removing RClone socket file: ${this.rcloneSocketPath}`);
|
||||
try {
|
||||
await rm(this.rcloneSocketPath, { force: true });
|
||||
@@ -201,7 +201,7 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
|
||||
* Checks if the RClone socket exists
|
||||
*/
|
||||
private async checkRcloneSocketExists(socketPath: string): Promise<boolean> {
|
||||
const socketExists = existsSync(socketPath);
|
||||
const socketExists = await fileExists(socketPath);
|
||||
if (!socketExists) {
|
||||
this.logger.warn(`RClone socket does not exist at: ${socketPath}`);
|
||||
return false;
|
||||
|
||||
@@ -10,6 +10,7 @@ export class NginxService {
|
||||
async reload() {
|
||||
try {
|
||||
await execa('/etc/rc.d/rc.nginx', ['reload']);
|
||||
this.logger.log('Nginx reloaded');
|
||||
return true;
|
||||
} catch (err: unknown) {
|
||||
this.logger.warn('Failed to reload Nginx with error: ', err);
|
||||
|
||||
@@ -4,13 +4,14 @@ import { ConfigService } from '@nestjs/config';
|
||||
import { ApiConfig } from '@unraid/shared/services/api-config.js';
|
||||
|
||||
import { DependencyService } from '@app/unraid-api/app/dependency.service.js';
|
||||
import { persistApiConfig } from '@app/unraid-api/config/api-config.module.js';
|
||||
import { ApiConfigPersistence } from '@app/unraid-api/config/api-config.module.js';
|
||||
|
||||
@Injectable()
|
||||
export class PluginManagementService {
|
||||
constructor(
|
||||
private readonly configService: ConfigService<{ api: ApiConfig }, true>,
|
||||
private readonly dependencyService: DependencyService
|
||||
private readonly dependencyService: DependencyService,
|
||||
private readonly apiConfigPersistence: ApiConfigPersistence
|
||||
) {}
|
||||
|
||||
get plugins() {
|
||||
@@ -111,6 +112,6 @@ export class PluginManagementService {
|
||||
}
|
||||
|
||||
private async persistConfig() {
|
||||
return await persistApiConfig(this.configService.get('api', { infer: true }));
|
||||
return await this.apiConfigPersistence.persist();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { DynamicModule, Logger, Module } from '@nestjs/common';
|
||||
|
||||
import { DependencyService } from '@app/unraid-api/app/dependency.service.js';
|
||||
import { ApiConfigModule } from '@app/unraid-api/config/api-config.module.js';
|
||||
import { ResolversModule } from '@app/unraid-api/graph/resolvers/resolvers.module.js';
|
||||
import { GlobalDepsModule } from '@app/unraid-api/plugin/global-deps.module.js';
|
||||
import { PluginManagementService } from '@app/unraid-api/plugin/plugin-management.service.js';
|
||||
@@ -22,7 +23,7 @@ export class PluginModule {
|
||||
|
||||
return {
|
||||
module: PluginModule,
|
||||
imports: [GlobalDepsModule, ResolversModule, ...apiModules],
|
||||
imports: [GlobalDepsModule, ResolversModule, ApiConfigModule, ...apiModules],
|
||||
providers: [PluginService, PluginManagementService, DependencyService, PluginResolver],
|
||||
exports: [PluginService, PluginManagementService, DependencyService, GlobalDepsModule],
|
||||
};
|
||||
@@ -44,7 +45,7 @@ export class PluginCliModule {
|
||||
|
||||
return {
|
||||
module: PluginCliModule,
|
||||
imports: [GlobalDepsModule, ...cliModules],
|
||||
imports: [GlobalDepsModule, ApiConfigModule, ...cliModules],
|
||||
providers: [PluginManagementService, DependencyService],
|
||||
exports: [PluginManagementService, DependencyService, GlobalDepsModule],
|
||||
};
|
||||
|
||||
@@ -1,14 +1,18 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
|
||||
import { ONE_SECOND_MS } from '@app/consts.js';
|
||||
import { NginxService } from '@app/unraid-api/nginx/nginx.service.js';
|
||||
import { ModificationEffect } from '@app/unraid-api/unraid-file-modifier/file-modification.js';
|
||||
|
||||
@Injectable()
|
||||
export class FileModificationEffectService {
|
||||
private readonly logger = new Logger(FileModificationEffectService.name);
|
||||
constructor(private readonly nginxService: NginxService) {}
|
||||
async runEffect(effect: ModificationEffect): Promise<void> {
|
||||
switch (effect) {
|
||||
case 'nginx:reload':
|
||||
this.logger.log('Reloading Nginx in 10 seconds...');
|
||||
await new Promise((resolve) => setTimeout(resolve, 10 * ONE_SECOND_MS));
|
||||
await this.nginxService.reload();
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -1 +1 @@
1751630630443
1753135397044
@@ -1 +1 @@
1751630630198
1753135396799
@@ -1 +1 @@
1751630630343
1753135396931
@@ -1 +1 @@
1751630630571
1753135397144
@@ -1 +0,0 @@
999999999999999999999999999999999999
@@ -1 +1 @@
1751630630810
1753135397303
@@ -8,7 +8,6 @@ import { describe, expect, test, vi } from 'vitest';
|
||||
import { FileModification } from '@app/unraid-api/unraid-file-modifier/file-modification.js';
|
||||
import AuthRequestModification from '@app/unraid-api/unraid-file-modifier/modifications/auth-request.modification.js';
|
||||
import DefaultPageLayoutModification from '@app/unraid-api/unraid-file-modifier/modifications/default-page-layout.modification.js';
|
||||
import LogRotateModification from '@app/unraid-api/unraid-file-modifier/modifications/log-rotate.modification.js';
|
||||
import NotificationsPageModification from '@app/unraid-api/unraid-file-modifier/modifications/notifications-page.modification.js';
|
||||
import RcNginxModification from '@app/unraid-api/unraid-file-modifier/modifications/rc-nginx.modification.js';
|
||||
import SSOFileModification from '@app/unraid-api/unraid-file-modifier/modifications/sso.modification.js';
|
||||
@@ -56,13 +55,7 @@ const patchTestCases: ModificationTestCase[] = [
|
||||
];
|
||||
|
||||
/** Modifications that simply add a new file & remove it on rollback. */
|
||||
const simpleTestCases: ModificationTestCase[] = [
|
||||
{
|
||||
ModificationClass: LogRotateModification,
|
||||
fileUrl: 'logrotate.conf',
|
||||
fileName: 'logrotate.conf',
|
||||
},
|
||||
];
|
||||
const simpleTestCases: ModificationTestCase[] = [];
|
||||
|
||||
const downloadOrRetrieveOriginalFile = async (filePath: string, fileUrl: string): Promise<string> => {
|
||||
let originalContent = '';
|
||||
|
||||
@@ -711,7 +711,7 @@ $.ajaxPrefilter(function(s, orig, xhr){
|
||||
<div class="upgrade_notice" style="display:none"></div>
|
||||
<div id="header" class="<?=$display['banner']?>">
|
||||
<div class="logo">
|
||||
<a href="https://unraid.net" target="_blank"><?readfile("$docroot/webGui/images/UN-logotype-gradient.svg")?></a>
|
||||
|
||||
<unraid-i18n-host><unraid-header-os-version></unraid-header-os-version></unraid-i18n-host>
|
||||
</div>
|
||||
<?include "$docroot/plugins/dynamix.my.servers/include/myservers2.php"?>
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { existsSync } from 'fs';
|
||||
import { readFile } from 'fs/promises';
|
||||
import { join } from 'node:path';
|
||||
|
||||
import { fileExists } from '@app/core/utils/files/file-exists.js';
|
||||
import {
|
||||
FileModification,
|
||||
ShouldApplyWithReason,
|
||||
@@ -45,7 +45,7 @@ export default class AuthRequestModification extends FileModification {
|
||||
|
||||
const filesToAdd = [getters.paths().webgui.logo.assetPath, ...jsFiles];
|
||||
|
||||
if (!existsSync(this.filePath)) {
|
||||
if (!(await fileExists(this.filePath))) {
|
||||
throw new Error(`File ${this.filePath} not found.`);
|
||||
}
|
||||
|
||||
|
||||
@@ -66,8 +66,19 @@ if (is_localhost() && !is_good_session()) {
|
||||
}
|
||||
|
||||
private addModalsWebComponent(source: string): string {
|
||||
if (source.includes('<unraid-modals>')) {
|
||||
return source;
|
||||
}
|
||||
return source.replace('<body>', '<body>\n<unraid-modals></unraid-modals>');
|
||||
}
|
||||
|
||||
private hideHeaderLogo(source: string): string {
|
||||
return source.replace(
|
||||
'<a href="https://unraid.net" target="_blank"><?readfile("$docroot/webGui/images/UN-logotype-gradient.svg")?></a>',
|
||||
''
|
||||
);
|
||||
}
|
||||
|
||||
private applyToSource(fileContent: string): string {
|
||||
const transformers = [
|
||||
this.removeNotificationBell.bind(this),
|
||||
@@ -75,6 +86,7 @@ if (is_localhost() && !is_good_session()) {
|
||||
this.addToaster.bind(this),
|
||||
this.patchGuiBootAuth.bind(this),
|
||||
this.addModalsWebComponent.bind(this),
|
||||
this.hideHeaderLogo.bind(this),
|
||||
];
|
||||
|
||||
return transformers.reduce((content, transformer) => transformer(content), fileContent);
|
||||
|
||||
@@ -1,70 +0,0 @@
|
||||
import { Logger } from '@nestjs/common';
|
||||
import { readFile, rm, writeFile } from 'node:fs/promises';
|
||||
|
||||
import { fileExists } from '@app/core/utils/files/file-exists.js';
|
||||
import {
|
||||
FileModification,
|
||||
ShouldApplyWithReason,
|
||||
} from '@app/unraid-api/unraid-file-modifier/file-modification.js';
|
||||
|
||||
export default class LogRotateModification extends FileModification {
|
||||
id: string = 'log-rotate';
|
||||
public readonly filePath: string = '/etc/logrotate.d/unraid-api' as const;
|
||||
private readonly logRotateConfig: string = `
|
||||
/var/log/unraid-api/*.log {
|
||||
rotate 1
|
||||
missingok
|
||||
size 1M
|
||||
su root root
|
||||
compress
|
||||
delaycompress
|
||||
copytruncate
|
||||
create 0640 root root
|
||||
}
|
||||
/var/log/graphql-api.log {
|
||||
rotate 1
|
||||
missingok
|
||||
size 1M
|
||||
su root root
|
||||
compress
|
||||
delaycompress
|
||||
copytruncate
|
||||
create 0640 root root
|
||||
}
|
||||
`.trimStart();
|
||||
|
||||
constructor(logger: Logger) {
|
||||
super(logger);
|
||||
}
|
||||
|
||||
protected async generatePatch(overridePath?: string): Promise<string> {
|
||||
const currentContent = (await fileExists(this.filePath))
|
||||
? await readFile(this.filePath, 'utf8')
|
||||
: '';
|
||||
|
||||
return this.createPatchWithDiff(
|
||||
overridePath ?? this.filePath,
|
||||
currentContent,
|
||||
this.logRotateConfig
|
||||
);
|
||||
}
|
||||
|
||||
async shouldApply(): Promise<ShouldApplyWithReason> {
|
||||
const alreadyConfigured = await fileExists(this.filePath);
|
||||
if (alreadyConfigured) {
|
||||
return { shouldApply: false, reason: 'LogRotate configuration already exists' };
|
||||
}
|
||||
return { shouldApply: true, reason: 'No LogRotate config for the API configured yet' };
|
||||
}
|
||||
|
||||
async apply(): Promise<string> {
|
||||
await this.rollback();
|
||||
await writeFile(this.filePath, this.logRotateConfig, { mode: 0o644 });
|
||||
return this.logRotateConfig;
|
||||
}
|
||||
|
||||
async rollback(): Promise<void> {
|
||||
await rm(this.getPathToAppliedPatch(), { force: true });
|
||||
await rm(this.filePath, { force: true });
|
||||
}
|
||||
}
|
||||
@@ -53,7 +53,7 @@ Index: /usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php
|
||||
}
|
||||
|
||||
function closeNotifier() {
|
||||
@@ -695,10 +704,11 @@
|
||||
@@ -695,15 +704,16 @@
|
||||
});
|
||||
</script>
|
||||
<?include "$docroot/plugins/dynamix.my.servers/include/myservers1.php"?>
|
||||
@@ -64,7 +64,13 @@ Index: /usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php
|
||||
<div class="upgrade_notice" style="display:none"></div>
|
||||
<div id="header" class="<?=$display['banner']?>">
|
||||
<div class="logo">
|
||||
<a href="https://unraid.net" target="_blank"><?readfile("$docroot/webGui/images/UN-logotype-gradient.svg")?></a>
|
||||
- <a href="https://unraid.net" target="_blank"><?readfile("$docroot/webGui/images/UN-logotype-gradient.svg")?></a>
|
||||
+
|
||||
<unraid-i18n-host><unraid-header-os-version></unraid-header-os-version></unraid-i18n-host>
|
||||
</div>
|
||||
<?include "$docroot/plugins/dynamix.my.servers/include/myservers2.php"?>
|
||||
</div>
|
||||
<a href="#" class="move_to_end" title="<?=_('Move To End')?>"><i class="fa fa-arrow-circle-down"></i></a>
|
||||
@@ -748,12 +758,12 @@
|
||||
}
|
||||
// create list of nchan scripts to be started
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
Index: /etc/logrotate.d/unraid-api
|
||||
===================================================================
|
||||
--- /etc/logrotate.d/unraid-api original
|
||||
+++ /etc/logrotate.d/unraid-api modified
|
||||
@@ -0,0 +1,20 @@
|
||||
+/var/log/unraid-api/*.log {
|
||||
+ rotate 1
|
||||
+ missingok
|
||||
+ size 1M
|
||||
+ su root root
|
||||
+ compress
|
||||
+ delaycompress
|
||||
+ copytruncate
|
||||
+ create 0640 root root
|
||||
+}
|
||||
+/var/log/graphql-api.log {
|
||||
+ rotate 1
|
||||
+ missingok
|
||||
+ size 1M
|
||||
+ su root root
|
||||
+ compress
|
||||
+ delaycompress
|
||||
+ copytruncate
|
||||
+ create 0640 root root
|
||||
+}
|
||||
@@ -1,6 +1,6 @@
|
||||
import { existsSync } from 'fs';
|
||||
import { readFile } from 'fs/promises';
|
||||
|
||||
import { fileExists } from '@app/core/utils/files/file-exists.js';
|
||||
import {
|
||||
FileModification,
|
||||
ShouldApplyWithReason,
|
||||
@@ -25,7 +25,7 @@ export default class RcNginxModification extends FileModification {
|
||||
* @returns The patch for the rc.nginx file
|
||||
*/
|
||||
protected async generatePatch(overridePath?: string): Promise<string> {
|
||||
if (!existsSync(this.filePath)) {
|
||||
if (!(await fileExists(this.filePath))) {
|
||||
throw new Error(`File ${this.filePath} not found.`);
|
||||
}
|
||||
const fileContent = await readFile(this.filePath, 'utf8');
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "unraid-monorepo",
|
||||
"private": true,
|
||||
"version": "4.9.3",
|
||||
"version": "4.10.0",
|
||||
"scripts": {
|
||||
"build": "pnpm -r build",
|
||||
"build:watch": " pnpm -r --parallel build:watch",
|
||||
@@ -26,6 +26,7 @@
|
||||
"@nestjs/core",
|
||||
"@parcel/watcher",
|
||||
"@swc/core",
|
||||
"@tailwindcss/oxide",
|
||||
"@unraid/libvirt",
|
||||
"core-js",
|
||||
"cpu-features",
|
||||
@@ -33,10 +34,12 @@
|
||||
"esbuild",
|
||||
"nestjs-pino",
|
||||
"protobufjs",
|
||||
"sharp",
|
||||
"simple-git-hooks",
|
||||
"ssh2",
|
||||
"unrs-resolver",
|
||||
"vue-demi"
|
||||
"vue-demi",
|
||||
"workerd"
|
||||
]
|
||||
},
|
||||
"dependencies": {
|
||||
@@ -57,5 +60,5 @@
|
||||
"pnpm lint:fix"
|
||||
]
|
||||
},
|
||||
"packageManager": "pnpm@10.12.4"
|
||||
"packageManager": "pnpm@10.13.1"
|
||||
}
|
||||
|
||||
@@ -25,10 +25,10 @@
|
||||
"description": "Unraid Connect plugin for Unraid API",
|
||||
"devDependencies": {
|
||||
"@apollo/client": "3.13.8",
|
||||
"@faker-js/faker": "9.8.0",
|
||||
"@faker-js/faker": "9.9.0",
|
||||
"@graphql-codegen/cli": "5.0.7",
|
||||
"@graphql-typed-document-node/core": "3.2.0",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.4.2",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.5.1",
|
||||
"@jsonforms/core": "3.6.0",
|
||||
"@nestjs/apollo": "13.1.0",
|
||||
"@nestjs/common": "11.1.3",
|
||||
@@ -41,29 +41,29 @@
|
||||
"@types/ini": "4.1.1",
|
||||
"@types/ip": "1.1.3",
|
||||
"@types/lodash-es": "4.17.12",
|
||||
"@types/node": "22.15.32",
|
||||
"@types/node": "22.16.4",
|
||||
"@types/ws": "8.18.1",
|
||||
"camelcase-keys": "9.1.3",
|
||||
"class-transformer": "0.5.1",
|
||||
"class-validator": "0.14.2",
|
||||
"execa": "9.6.0",
|
||||
"fast-check": "4.1.1",
|
||||
"fast-check": "4.2.0",
|
||||
"got": "14.4.7",
|
||||
"graphql": "16.11.0",
|
||||
"graphql-scalars": "1.24.2",
|
||||
"graphql-subscriptions": "3.0.0",
|
||||
"graphql-ws": "6.0.5",
|
||||
"graphql-ws": "6.0.6",
|
||||
"ini": "5.0.0",
|
||||
"jose": "6.0.11",
|
||||
"lodash-es": "4.17.21",
|
||||
"nest-authz": "2.17.0",
|
||||
"prettier": "3.5.3",
|
||||
"prettier": "3.6.2",
|
||||
"rimraf": "6.0.1",
|
||||
"rxjs": "7.8.2",
|
||||
"type-fest": "4.41.0",
|
||||
"typescript": "5.8.3",
|
||||
"vitest": "3.2.4",
|
||||
"ws": "8.18.2",
|
||||
"ws": "8.18.3",
|
||||
"zen-observable-ts": "1.1.0"
|
||||
},
|
||||
"dependencies": {
|
||||
@@ -91,13 +91,13 @@
|
||||
"graphql": "16.11.0",
|
||||
"graphql-scalars": "1.24.2",
|
||||
"graphql-subscriptions": "3.0.0",
|
||||
"graphql-ws": "6.0.5",
|
||||
"graphql-ws": "6.0.6",
|
||||
"ini": "5.0.0",
|
||||
"jose": "6.0.11",
|
||||
"lodash-es": "4.17.21",
|
||||
"nest-authz": "2.17.0",
|
||||
"rxjs": "7.8.2",
|
||||
"ws": "^8.18.0",
|
||||
"ws": "8.18.3",
|
||||
"zen-observable-ts": "1.1.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,269 @@
|
||||
import { EventEmitter2 } from '@nestjs/event-emitter';
|
||||
|
||||
import { PubSub } from 'graphql-subscriptions';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { MinigraphStatus } from '../config/connect.config.js';
|
||||
import { EVENTS, GRAPHQL_PUBSUB_CHANNEL } from '../helper/nest-tokens.js';
|
||||
import { MothershipConnectionService } from '../mothership-proxy/connection.service.js';
|
||||
import { MothershipController } from '../mothership-proxy/mothership.controller.js';
|
||||
import { MothershipHandler } from '../mothership-proxy/mothership.events.js';
|
||||
|
||||
describe('MothershipHandler - Behavioral Tests', () => {
|
||||
let handler: MothershipHandler;
|
||||
let connectionService: MothershipConnectionService;
|
||||
let mothershipController: MothershipController;
|
||||
let pubSub: PubSub;
|
||||
let eventEmitter: EventEmitter2;
|
||||
|
||||
// Track actual state changes and effects
|
||||
let connectionAttempts: Array<{ timestamp: number; reason: string }> = [];
|
||||
let publishedMessages: Array<{ channel: string; data: any }> = [];
|
||||
let controllerStops: Array<{ timestamp: number; reason?: string }> = [];
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset tracking arrays
|
||||
connectionAttempts = [];
|
||||
publishedMessages = [];
|
||||
controllerStops = [];
|
||||
|
||||
// Create real event emitter for integration testing
|
||||
eventEmitter = new EventEmitter2();
|
||||
|
||||
// Mock connection service with realistic behavior
|
||||
connectionService = {
|
||||
getIdentityState: vi.fn(),
|
||||
getConnectionState: vi.fn(),
|
||||
} as any;
|
||||
|
||||
// Mock controller that tracks behavior instead of just method calls
|
||||
mothershipController = {
|
||||
initOrRestart: vi.fn().mockImplementation(() => {
|
||||
connectionAttempts.push({
|
||||
timestamp: Date.now(),
|
||||
reason: 'initOrRestart called',
|
||||
});
|
||||
return Promise.resolve();
|
||||
}),
|
||||
stop: vi.fn().mockImplementation(() => {
|
||||
controllerStops.push({
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
return Promise.resolve();
|
||||
}),
|
||||
} as any;
|
||||
|
||||
// Mock PubSub that tracks published messages
|
||||
pubSub = {
|
||||
publish: vi.fn().mockImplementation((channel: string, data: any) => {
|
||||
publishedMessages.push({ channel, data });
|
||||
return Promise.resolve();
|
||||
}),
|
||||
} as any;
|
||||
|
||||
handler = new MothershipHandler(connectionService, mothershipController, pubSub);
|
||||
});
|
||||
|
||||
describe('Connection Recovery Behavior', () => {
|
||||
it('should attempt reconnection when ping fails', async () => {
|
||||
// Given: Connection is in ping failure state
|
||||
vi.mocked(connectionService.getConnectionState).mockReturnValue({
|
||||
status: MinigraphStatus.PING_FAILURE,
|
||||
error: 'Ping timeout after 3 minutes',
|
||||
});
|
||||
|
||||
// When: Connection status change event occurs
|
||||
await handler.onMothershipConnectionStatusChanged();
|
||||
|
||||
// Then: System should attempt to recover the connection
|
||||
expect(connectionAttempts).toHaveLength(1);
|
||||
expect(connectionAttempts[0].reason).toBe('initOrRestart called');
|
||||
});
|
||||
|
||||
it('should NOT interfere with exponential backoff during error retry state', async () => {
|
||||
// Given: Connection is in error retry state (GraphQL client managing backoff)
|
||||
vi.mocked(connectionService.getConnectionState).mockReturnValue({
|
||||
status: MinigraphStatus.ERROR_RETRYING,
|
||||
error: 'Network error',
|
||||
timeout: 20000,
|
||||
timeoutStart: Date.now(),
|
||||
});
|
||||
|
||||
// When: Connection status change event occurs
|
||||
await handler.onMothershipConnectionStatusChanged();
|
||||
|
||||
// Then: System should NOT interfere with ongoing retry logic
|
||||
expect(connectionAttempts).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should remain stable during normal connection states', async () => {
|
||||
const stableStates = [MinigraphStatus.CONNECTED, MinigraphStatus.CONNECTING];
|
||||
|
||||
for (const status of stableStates) {
|
||||
// Reset for each test
|
||||
connectionAttempts.length = 0;
|
||||
|
||||
// Given: Connection is in a stable state
|
||||
vi.mocked(connectionService.getConnectionState).mockReturnValue({
|
||||
status,
|
||||
error: null,
|
||||
});
|
||||
|
||||
// When: Connection status change event occurs
|
||||
await handler.onMothershipConnectionStatusChanged();
|
||||
|
||||
// Then: System should not trigger unnecessary reconnection attempts
|
||||
expect(connectionAttempts).toHaveLength(0);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Identity-Based Connection Behavior', () => {
|
||||
it('should establish connection when valid API key becomes available', async () => {
|
||||
// Given: Valid API key is present
|
||||
vi.mocked(connectionService.getIdentityState).mockReturnValue({
|
||||
state: {
|
||||
apiKey: 'valid-unraid-key-12345',
|
||||
unraidVersion: '6.12.0',
|
||||
flashGuid: 'test-flash-guid',
|
||||
apiVersion: '1.0.0',
|
||||
},
|
||||
isLoaded: true,
|
||||
});
|
||||
|
||||
// When: Identity changes
|
||||
await handler.onIdentityChanged();
|
||||
|
||||
// Then: System should establish mothership connection
|
||||
expect(connectionAttempts).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should not attempt connection without valid credentials', async () => {
|
||||
const invalidCredentials = [{ apiKey: undefined }, { apiKey: '' }];
|
||||
|
||||
for (const credentials of invalidCredentials) {
|
||||
// Reset for each test
|
||||
connectionAttempts.length = 0;
|
||||
|
||||
// Given: Invalid or missing API key
|
||||
vi.mocked(connectionService.getIdentityState).mockReturnValue({
|
||||
state: credentials,
|
||||
isLoaded: false,
|
||||
});
|
||||
|
||||
// When: Identity changes
|
||||
await handler.onIdentityChanged();
|
||||
|
||||
// Then: System should not attempt connection
|
||||
                expect(connectionAttempts).toHaveLength(0);
            }
        });
    });

    describe('Logout Behavior', () => {
        it('should properly clean up connections and notify subscribers on logout', async () => {
            // When: User logs out
            await handler.logout({ reason: 'User initiated logout' });

            // Then: System should clean up connections
            expect(controllerStops).toHaveLength(1);

            // And: Subscribers should be notified of empty state
            expect(publishedMessages).toHaveLength(2);

            const serversMessage = publishedMessages.find(
                (m) => m.channel === GRAPHQL_PUBSUB_CHANNEL.SERVERS
            );
            const ownerMessage = publishedMessages.find(
                (m) => m.channel === GRAPHQL_PUBSUB_CHANNEL.OWNER
            );

            expect(serversMessage?.data).toEqual({ servers: [] });
            expect(ownerMessage?.data).toEqual({
                owner: { username: 'root', url: '', avatar: '' },
            });
        });

        it('should handle logout gracefully even without explicit reason', async () => {
            // When: System logout occurs without reason
            await handler.logout({});

            // Then: Cleanup should still occur properly
            expect(controllerStops).toHaveLength(1);
            expect(publishedMessages).toHaveLength(2);
        });
    });

    describe('DDoS Prevention Behavior', () => {
        it('should demonstrate exponential backoff is respected during network errors', async () => {
            // Given: Multiple rapid network errors occur
            const errorStates = [
                { status: MinigraphStatus.ERROR_RETRYING, error: 'Network error 1' },
                { status: MinigraphStatus.ERROR_RETRYING, error: 'Network error 2' },
                { status: MinigraphStatus.ERROR_RETRYING, error: 'Network error 3' },
            ];

            // When: Rapid error retry states occur
            for (const state of errorStates) {
                vi.mocked(connectionService.getConnectionState).mockReturnValue(state);
                await handler.onMothershipConnectionStatusChanged();
            }

            // Then: No linear retry attempts should be made (respecting exponential backoff)
            expect(connectionAttempts).toHaveLength(0);
        });

        it('should differentiate between network errors and ping failures', async () => {
            // Given: Network error followed by ping failure
            vi.mocked(connectionService.getConnectionState).mockReturnValue({
                status: MinigraphStatus.ERROR_RETRYING,
                error: 'Network error',
            });

            // When: Network error occurs
            await handler.onMothershipConnectionStatusChanged();

            // Then: No immediate reconnection attempt
            expect(connectionAttempts).toHaveLength(0);

            // Given: Ping failure occurs (different issue)
            vi.mocked(connectionService.getConnectionState).mockReturnValue({
                status: MinigraphStatus.PING_FAILURE,
                error: 'Ping timeout',
            });

            // When: Ping failure occurs
            await handler.onMothershipConnectionStatusChanged();

            // Then: Immediate reconnection attempt should occur
            expect(connectionAttempts).toHaveLength(1);
        });
    });

    describe('Edge Cases and Error Handling', () => {
        it('should handle missing connection state gracefully', async () => {
            // Given: Connection service returns undefined
            vi.mocked(connectionService.getConnectionState).mockReturnValue(undefined);

            // When: Connection status change occurs
            await handler.onMothershipConnectionStatusChanged();

            // Then: No errors should occur, no reconnection attempts
            expect(connectionAttempts).toHaveLength(0);
        });

        it('should handle malformed connection state', async () => {
            // Given: Malformed connection state
            vi.mocked(connectionService.getConnectionState).mockReturnValue({
                status: 'UNKNOWN_STATUS' as any,
                error: 'Malformed state',
            });

            // When: Connection status change occurs
            await handler.onMothershipConnectionStatusChanged();

            // Then: Should not trigger reconnection for unknown states
            expect(connectionAttempts).toHaveLength(0);
        });
    });
});
@@ -1,82 +1,48 @@
|
||||
import { Injectable, Logger, OnModuleDestroy, OnModuleInit } from '@nestjs/common';
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { existsSync, readFileSync } from 'fs';
|
||||
import { writeFile } from 'fs/promises';
|
||||
import path from 'path';
|
||||
|
||||
import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';
|
||||
import { plainToInstance } from 'class-transformer';
|
||||
import { validateOrReject } from 'class-validator';
|
||||
import { parse as parseIni } from 'ini';
|
||||
import { isEqual } from 'lodash-es';
|
||||
import { bufferTime } from 'rxjs/operators';
|
||||
|
||||
import type { MyServersConfig as LegacyConfig } from './my-servers.config.js';
|
||||
import { ConfigType, MyServersConfig } from './connect.config.js';
|
||||
import { emptyMyServersConfig, MyServersConfig } from './connect.config.js';
|
||||
|
||||
@Injectable()
|
||||
export class ConnectConfigPersister implements OnModuleInit, OnModuleDestroy {
|
||||
constructor(private readonly configService: ConfigService<ConfigType, true>) {}
|
||||
|
||||
private logger = new Logger(ConnectConfigPersister.name);
|
||||
get configPath() {
|
||||
// PATHS_CONFIG_MODULES is a required environment variable.
|
||||
// It is the directory where custom config files are stored.
|
||||
return path.join(this.configService.getOrThrow('PATHS_CONFIG_MODULES'), 'connect.json');
|
||||
}
|
||||
|
||||
async onModuleDestroy() {
|
||||
await this.persist();
|
||||
}
|
||||
|
||||
async onModuleInit() {
|
||||
this.logger.verbose(`Config path: ${this.configPath}`);
|
||||
await this.loadOrMigrateConfig();
|
||||
// Persist changes to the config.
|
||||
this.configService.changes$.pipe(bufferTime(25)).subscribe({
|
||||
next: async (changes) => {
|
||||
const connectConfigChanged = changes.some(({ path }) =>
|
||||
path.startsWith('connect.config')
|
||||
);
|
||||
if (connectConfigChanged) {
|
||||
await this.persist();
|
||||
}
|
||||
},
|
||||
error: (err) => {
|
||||
this.logger.error('Error receiving config changes:', err);
|
||||
},
|
||||
});
|
||||
export class ConnectConfigPersister extends ConfigFilePersister<MyServersConfig> {
|
||||
constructor(configService: ConfigService) {
|
||||
super(configService);
|
||||
}
|
||||
|
||||
/**
|
||||
* Persist the config to disk if the given data is different from the data on-disk.
|
||||
* This helps preserve the boot flash drive's life by avoiding unnecessary writes.
|
||||
*
|
||||
* @param config - The config object to persist.
|
||||
* @returns `true` if the config was persisted, `false` otherwise.
|
||||
* @override
|
||||
* @returns The name of the config file.
|
||||
*/
|
||||
async persist(config = this.configService.get<MyServersConfig>('connect.config')) {
|
||||
try {
|
||||
if (isEqual(config, await this.loadConfig())) {
|
||||
this.logger.verbose(`Config is unchanged, skipping persistence`);
|
||||
return false;
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.error(error, `Error loading config (will overwrite file)`);
|
||||
}
|
||||
const data = JSON.stringify(config, null, 2);
|
||||
this.logger.verbose(`Persisting config to ${this.configPath}: ${data}`);
|
||||
try {
|
||||
await writeFile(this.configPath, data);
|
||||
this.logger.verbose(`Config persisted to ${this.configPath}`);
|
||||
return true;
|
||||
} catch (error) {
|
||||
this.logger.error(error, `Error persisting config to '${this.configPath}'`);
|
||||
return false;
|
||||
}
|
||||
fileName(): string {
|
||||
return 'connect.json';
|
||||
}
|
||||
|
||||
/**
|
||||
* @override
|
||||
* @returns The key of the config in the config service.
|
||||
*/
|
||||
configKey(): string {
|
||||
return 'connect.config';
|
||||
}
|
||||
|
||||
/**
|
||||
* @override
|
||||
* @returns The default config object.
|
||||
*/
|
||||
defaultConfig(): MyServersConfig {
|
||||
return emptyMyServersConfig();
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate the config object.
|
||||
* @override
|
||||
* @param config - The config object to validate.
|
||||
* @returns The validated config instance.
|
||||
*/
|
||||
@@ -89,49 +55,21 @@ export class ConnectConfigPersister implements OnModuleInit, OnModuleDestroy {
|
||||
enableImplicitConversion: true,
|
||||
});
|
||||
}
|
||||
await validateOrReject(instance);
|
||||
await validateOrReject(instance, { whitelist: true });
|
||||
return instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Load the config from the filesystem, or migrate the legacy config file to the new config format.
|
||||
* When unable to load or migrate the config, messages are logged at WARN level, but no other action is taken.
|
||||
* @returns true if the config was loaded successfully, false otherwise.
|
||||
* @override
|
||||
* @returns The migrated config object.
|
||||
*/
|
||||
private async loadOrMigrateConfig() {
|
||||
try {
|
||||
const config = await this.loadConfig();
|
||||
this.configService.set('connect.config', config);
|
||||
this.logger.verbose(`Config loaded from ${this.configPath}`);
|
||||
return true;
|
||||
} catch (error) {
|
||||
this.logger.warn(error, 'Error loading config');
|
||||
}
|
||||
|
||||
try {
|
||||
await this.migrateLegacyConfig();
|
||||
return this.persist();
|
||||
} catch (error) {
|
||||
this.logger.warn('Error migrating legacy config:', error);
|
||||
}
|
||||
|
||||
this.logger.error(
|
||||
'Failed to load or migrate config from filesystem. Config is not persisted. Using defaults in-memory.'
|
||||
);
|
||||
return false;
|
||||
async migrateConfig(): Promise<MyServersConfig> {
|
||||
return await this.migrateLegacyConfig();
|
||||
}
|
||||
|
||||
/**
|
||||
* Load the JSON config from the filesystem
|
||||
* @throws {Error} - If the config file does not exist.
|
||||
* @throws {Error} - If the config file is not parse-able.
|
||||
* @throws {Error} - If the config file is not valid.
|
||||
*/
|
||||
private async loadConfig(configFilePath = this.configPath) {
|
||||
if (!existsSync(configFilePath))
|
||||
throw new Error(`Config file does not exist at '${configFilePath}'`);
|
||||
return this.validate(JSON.parse(readFileSync(configFilePath, 'utf8')));
|
||||
}
|
||||
/**-----------------------------------------------------
|
||||
* Helpers for migrating myservers.cfg to connect.json
|
||||
*------------------------------------------------------**/
|
||||
|
||||
/**
|
||||
* Migrate the legacy config file to the new config format.
|
||||
@@ -143,8 +81,7 @@ export class ConnectConfigPersister implements OnModuleInit, OnModuleDestroy {
|
||||
private async migrateLegacyConfig(filePath?: string) {
|
||||
const myServersCfgFile = await this.readLegacyConfig(filePath);
|
||||
const legacyConfig = this.parseLegacyConfig(myServersCfgFile);
|
||||
const newConfig = await this.convertLegacyConfig(legacyConfig);
|
||||
this.configService.set('connect.config', newConfig);
|
||||
return await this.convertLegacyConfig(legacyConfig);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -0,0 +1,158 @@
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { access, constants, mkdir, readFile, rm } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ConfigType } from '../config/connect.config.js';
|
||||
import { ConnectStatusWriterService } from './connect-status-writer.service.js';
|
||||
|
||||
describe('ConnectStatusWriterService Config Behavior', () => {
|
||||
let service: ConnectStatusWriterService;
|
||||
let configService: ConfigService<ConfigType, true>;
|
||||
const testDir = '/tmp/connect-status-config-test';
|
||||
const testFilePath = join(testDir, 'connectStatus.json');
|
||||
|
||||
// Simulate config changes
|
||||
let configStore: any = {};
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
// Reset config store
|
||||
configStore = {};
|
||||
|
||||
// Create test directory
|
||||
await mkdir(testDir, { recursive: true });
|
||||
|
||||
// Create a ConfigService mock that behaves like the real one
|
||||
configService = {
|
||||
get: vi.fn().mockImplementation((key: string) => {
|
||||
console.log(`ConfigService.get('${key}') called, returning:`, configStore[key]);
|
||||
return configStore[key];
|
||||
}),
|
||||
set: vi.fn().mockImplementation((key: string, value: any) => {
|
||||
console.log(`ConfigService.set('${key}', ${JSON.stringify(value)}) called`);
|
||||
configStore[key] = value;
|
||||
}),
|
||||
} as unknown as ConfigService<ConfigType, true>;
|
||||
|
||||
service = new ConnectStatusWriterService(configService);
|
||||
|
||||
// Override the status file path to use our test location
|
||||
Object.defineProperty(service, 'statusFilePath', {
|
||||
get: () => testFilePath,
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await service.onModuleDestroy();
|
||||
await rm(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should write status when config is updated directly', async () => {
|
||||
// Initialize service - should write PRE_INIT
|
||||
await service.onApplicationBootstrap();
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
let content = await readFile(testFilePath, 'utf-8');
|
||||
let data = JSON.parse(content);
|
||||
console.log('Initial status:', data);
|
||||
expect(data.connectionStatus).toBe('PRE_INIT');
|
||||
|
||||
// Update config directly (simulating what ConnectionService does)
|
||||
console.log('\n=== Updating config to CONNECTED ===');
|
||||
configService.set('connect.mothership', {
|
||||
status: 'CONNECTED',
|
||||
error: null,
|
||||
lastPing: Date.now(),
|
||||
});
|
||||
|
||||
// Call the writeStatus method directly (since @OnEvent handles the event)
|
||||
await service['writeStatus']();
|
||||
|
||||
content = await readFile(testFilePath, 'utf-8');
|
||||
data = JSON.parse(content);
|
||||
console.log('Status after config update:', data);
|
||||
expect(data.connectionStatus).toBe('CONNECTED');
|
||||
});
|
||||
|
||||
it('should test the actual flow with multiple status updates', async () => {
|
||||
await service.onApplicationBootstrap();
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
const statusUpdates = [
|
||||
{ status: 'CONNECTING', error: null, lastPing: null },
|
||||
{ status: 'CONNECTED', error: null, lastPing: Date.now() },
|
||||
{ status: 'DISCONNECTED', error: 'Lost connection', lastPing: Date.now() - 10000 },
|
||||
{ status: 'RECONNECTING', error: null, lastPing: Date.now() - 10000 },
|
||||
{ status: 'CONNECTED', error: null, lastPing: Date.now() },
|
||||
];
|
||||
|
||||
for (const update of statusUpdates) {
|
||||
console.log(`\n=== Updating to ${update.status} ===`);
|
||||
|
||||
// Update config
|
||||
configService.set('connect.mothership', update);
|
||||
|
||||
// Call writeStatus directly
|
||||
await service['writeStatus']();
|
||||
|
||||
const content = await readFile(testFilePath, 'utf-8');
|
||||
const data = JSON.parse(content);
|
||||
console.log(`Status file shows: ${data.connectionStatus}`);
|
||||
expect(data.connectionStatus).toBe(update.status);
|
||||
}
|
||||
});
|
||||
|
||||
it('should handle case where config is not set before event', async () => {
|
||||
await service.onApplicationBootstrap();
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
// Delete the config
|
||||
delete configStore['connect.mothership'];
|
||||
|
||||
// Call writeStatus without config
|
||||
console.log('\n=== Calling writeStatus with no config ===');
|
||||
await service['writeStatus']();
|
||||
|
||||
const content = await readFile(testFilePath, 'utf-8');
|
||||
const data = JSON.parse(content);
|
||||
console.log('Status with no config:', data);
|
||||
expect(data.connectionStatus).toBe('PRE_INIT');
|
||||
|
||||
// Now set config and call writeStatus again
|
||||
console.log('\n=== Setting config and calling writeStatus ===');
|
||||
configService.set('connect.mothership', {
|
||||
status: 'CONNECTED',
|
||||
error: null,
|
||||
lastPing: Date.now(),
|
||||
});
|
||||
await service['writeStatus']();
|
||||
|
||||
const content2 = await readFile(testFilePath, 'utf-8');
|
||||
const data2 = JSON.parse(content2);
|
||||
console.log('Status after setting config:', data2);
|
||||
expect(data2.connectionStatus).toBe('CONNECTED');
|
||||
});
|
||||
|
||||
describe('cleanup on shutdown', () => {
|
||||
it('should delete status file on module destroy', async () => {
|
||||
await service.onApplicationBootstrap();
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
// Verify file exists
|
||||
await expect(access(testFilePath, constants.F_OK)).resolves.not.toThrow();
|
||||
|
||||
// Cleanup
|
||||
await service.onModuleDestroy();
|
||||
|
||||
// Verify file is deleted
|
||||
await expect(access(testFilePath, constants.F_OK)).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should handle cleanup when file does not exist', async () => {
|
||||
// Don't bootstrap (so no file is written)
|
||||
await expect(service.onModuleDestroy()).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,167 @@
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { access, constants, mkdir, readFile, rm } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ConfigType } from '../config/connect.config.js';
|
||||
import { ConnectStatusWriterService } from './connect-status-writer.service.js';
|
||||
|
||||
describe('ConnectStatusWriterService Integration', () => {
|
||||
let service: ConnectStatusWriterService;
|
||||
let configService: ConfigService<ConfigType, true>;
|
||||
const testDir = '/tmp/connect-status-test';
|
||||
const testFilePath = join(testDir, 'connectStatus.json');
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
// Create test directory
|
||||
await mkdir(testDir, { recursive: true });
|
||||
|
||||
configService = {
|
||||
get: vi.fn().mockImplementation((key: string) => {
|
||||
console.log(`ConfigService.get called with key: ${key}`);
|
||||
return {
|
||||
status: 'CONNECTED',
|
||||
error: null,
|
||||
lastPing: Date.now(),
|
||||
};
|
||||
}),
|
||||
} as unknown as ConfigService<ConfigType, true>;
|
||||
|
||||
service = new ConnectStatusWriterService(configService);
|
||||
|
||||
// Override the status file path to use our test location
|
||||
Object.defineProperty(service, 'statusFilePath', {
|
||||
get: () => testFilePath,
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await service.onModuleDestroy();
|
||||
await rm(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should write initial PRE_INIT status, then update on event', async () => {
|
||||
// First, mock the config to return undefined (no connection metadata)
|
||||
vi.mocked(configService.get).mockReturnValue(undefined);
|
||||
|
||||
console.log('=== Starting onApplicationBootstrap ===');
|
||||
await service.onApplicationBootstrap();
|
||||
|
||||
// Wait a bit for the initial write to complete
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
// Read initial status
|
||||
const initialContent = await readFile(testFilePath, 'utf-8');
|
||||
const initialData = JSON.parse(initialContent);
|
||||
console.log('Initial status written:', initialData);
|
||||
|
||||
expect(initialData.connectionStatus).toBe('PRE_INIT');
|
||||
expect(initialData.error).toBeNull();
|
||||
expect(initialData.lastPing).toBeNull();
|
||||
|
||||
// Now update the mock to return CONNECTED status
|
||||
vi.mocked(configService.get).mockReturnValue({
|
||||
status: 'CONNECTED',
|
||||
error: null,
|
||||
lastPing: 1234567890,
|
||||
});
|
||||
|
||||
console.log('=== Calling writeStatus directly ===');
|
||||
await service['writeStatus']();
|
||||
|
||||
// Read updated status
|
||||
const updatedContent = await readFile(testFilePath, 'utf-8');
|
||||
const updatedData = JSON.parse(updatedContent);
|
||||
console.log('Updated status after writeStatus:', updatedData);
|
||||
|
||||
expect(updatedData.connectionStatus).toBe('CONNECTED');
|
||||
expect(updatedData.lastPing).toBe(1234567890);
|
||||
});
|
||||
|
||||
it('should handle rapid status changes correctly', async () => {
|
||||
const statusChanges = [
|
||||
{ status: 'PRE_INIT', error: null, lastPing: null },
|
||||
{ status: 'CONNECTING', error: null, lastPing: null },
|
||||
{ status: 'CONNECTED', error: null, lastPing: Date.now() },
|
||||
{ status: 'DISCONNECTED', error: 'Connection lost', lastPing: Date.now() - 5000 },
|
||||
{ status: 'CONNECTED', error: null, lastPing: Date.now() },
|
||||
];
|
||||
|
||||
let changeIndex = 0;
|
||||
vi.mocked(configService.get).mockImplementation(() => {
|
||||
const change = statusChanges[changeIndex];
|
||||
console.log(`Returning status ${changeIndex}: ${change.status}`);
|
||||
return change;
|
||||
});
|
||||
|
||||
await service.onApplicationBootstrap();
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
// Simulate the final status change
|
||||
changeIndex = statusChanges.length - 1;
|
||||
console.log(`=== Calling writeStatus for final status: ${statusChanges[changeIndex].status} ===`);
|
||||
await service['writeStatus']();
|
||||
|
||||
// Read final status
|
||||
const finalContent = await readFile(testFilePath, 'utf-8');
|
||||
const finalData = JSON.parse(finalContent);
|
||||
console.log('Final status after status change:', finalData);
|
||||
|
||||
// Should have the last status
|
||||
expect(finalData.connectionStatus).toBe('CONNECTED');
|
||||
expect(finalData.error).toBeNull();
|
||||
});
|
||||
|
||||
it('should handle multiple write calls correctly', async () => {
|
||||
const writes: number[] = [];
|
||||
const originalWriteStatus = service['writeStatus'].bind(service);
|
||||
|
||||
service['writeStatus'] = async function() {
|
||||
const timestamp = Date.now();
|
||||
writes.push(timestamp);
|
||||
console.log(`writeStatus called at ${timestamp}`);
|
||||
return originalWriteStatus();
|
||||
};
|
||||
|
||||
await service.onApplicationBootstrap();
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
const initialWrites = writes.length;
|
||||
console.log(`Initial writes: ${initialWrites}`);
|
||||
|
||||
// Make multiple write calls
|
||||
for (let i = 0; i < 3; i++) {
|
||||
console.log(`Calling writeStatus ${i}`);
|
||||
await service['writeStatus']();
|
||||
}
|
||||
|
||||
console.log(`Total writes: ${writes.length}`);
|
||||
console.log('Write timestamps:', writes);
|
||||
|
||||
// Should have initial write + 3 additional writes
|
||||
expect(writes.length).toBe(initialWrites + 3);
|
||||
});
|
||||
|
||||
describe('cleanup on shutdown', () => {
|
||||
it('should delete status file on module destroy', async () => {
|
||||
await service.onApplicationBootstrap();
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
// Verify file exists
|
||||
await expect(access(testFilePath, constants.F_OK)).resolves.not.toThrow();
|
||||
|
||||
// Cleanup
|
||||
await service.onModuleDestroy();
|
||||
|
||||
// Verify file is deleted
|
||||
await expect(access(testFilePath, constants.F_OK)).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should handle cleanup gracefully when file does not exist', async () => {
|
||||
// Don't bootstrap (so no file is created)
|
||||
await expect(service.onModuleDestroy()).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,140 @@
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { unlink, writeFile } from 'fs/promises';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ConfigType } from '../config/connect.config.js';
|
||||
import { ConnectStatusWriterService } from './connect-status-writer.service.js';
|
||||
|
||||
vi.mock('fs/promises', () => ({
|
||||
writeFile: vi.fn(),
|
||||
unlink: vi.fn(),
|
||||
}));
|
||||
|
||||
describe('ConnectStatusWriterService', () => {
|
||||
let service: ConnectStatusWriterService;
|
||||
let configService: ConfigService<ConfigType, true>;
|
||||
let writeFileMock: ReturnType<typeof vi.fn>;
|
||||
let unlinkMock: ReturnType<typeof vi.fn>;
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
vi.useFakeTimers();
|
||||
|
||||
writeFileMock = vi.mocked(writeFile);
|
||||
unlinkMock = vi.mocked(unlink);
|
||||
|
||||
configService = {
|
||||
get: vi.fn().mockReturnValue({
|
||||
status: 'CONNECTED',
|
||||
error: null,
|
||||
lastPing: Date.now(),
|
||||
}),
|
||||
} as unknown as ConfigService<ConfigType, true>;
|
||||
|
||||
service = new ConnectStatusWriterService(configService);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
describe('onApplicationBootstrap', () => {
|
||||
it('should write initial status on bootstrap', async () => {
|
||||
await service.onApplicationBootstrap();
|
||||
|
||||
expect(writeFileMock).toHaveBeenCalledTimes(1);
|
||||
expect(writeFileMock).toHaveBeenCalledWith(
|
||||
'/var/local/emhttp/connectStatus.json',
|
||||
expect.stringContaining('CONNECTED')
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle event-driven status changes', async () => {
|
||||
await service.onApplicationBootstrap();
|
||||
writeFileMock.mockClear();
|
||||
|
||||
// The service uses @OnEvent decorator, so we need to call the method directly
|
||||
await service['writeStatus']();
|
||||
|
||||
expect(writeFileMock).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('write content', () => {
|
||||
it('should write correct JSON structure with all fields', async () => {
|
||||
const mockMetadata = {
|
||||
status: 'CONNECTED',
|
||||
error: 'Some error',
|
||||
lastPing: 1234567890,
|
||||
};
|
||||
|
||||
vi.mocked(configService.get).mockReturnValue(mockMetadata);
|
||||
|
||||
await service.onApplicationBootstrap();
|
||||
|
||||
const writeCall = writeFileMock.mock.calls[0];
|
||||
const writtenData = JSON.parse(writeCall[1] as string);
|
||||
|
||||
expect(writtenData).toMatchObject({
|
||||
connectionStatus: 'CONNECTED',
|
||||
error: 'Some error',
|
||||
lastPing: 1234567890,
|
||||
allowedOrigins: '',
|
||||
});
|
||||
expect(writtenData.timestamp).toBeDefined();
|
||||
expect(typeof writtenData.timestamp).toBe('number');
|
||||
});
|
||||
|
||||
it('should handle missing connection metadata', async () => {
|
||||
vi.mocked(configService.get).mockReturnValue(undefined);
|
||||
|
||||
await service.onApplicationBootstrap();
|
||||
|
||||
const writeCall = writeFileMock.mock.calls[0];
|
||||
const writtenData = JSON.parse(writeCall[1] as string);
|
||||
|
||||
expect(writtenData).toMatchObject({
|
||||
connectionStatus: 'PRE_INIT',
|
||||
error: null,
|
||||
lastPing: null,
|
||||
allowedOrigins: '',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('error handling', () => {
|
||||
it('should handle write errors gracefully', async () => {
|
||||
writeFileMock.mockRejectedValue(new Error('Write failed'));
|
||||
|
||||
await expect(service.onApplicationBootstrap()).resolves.not.toThrow();
|
||||
|
||||
// Test direct write error handling
|
||||
await expect(service['writeStatus']()).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('cleanup on shutdown', () => {
|
||||
it('should delete status file on module destroy', async () => {
|
||||
await service.onModuleDestroy();
|
||||
|
||||
expect(unlinkMock).toHaveBeenCalledTimes(1);
|
||||
expect(unlinkMock).toHaveBeenCalledWith('/var/local/emhttp/connectStatus.json');
|
||||
});
|
||||
|
||||
it('should handle file deletion errors gracefully', async () => {
|
||||
unlinkMock.mockRejectedValue(new Error('File not found'));
|
||||
|
||||
await expect(service.onModuleDestroy()).resolves.not.toThrow();
|
||||
|
||||
expect(unlinkMock).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should ensure file is deleted even if it was never written', async () => {
|
||||
// Don't bootstrap (so no file is written)
|
||||
await service.onModuleDestroy();
|
||||
|
||||
expect(unlinkMock).toHaveBeenCalledTimes(1);
|
||||
expect(unlinkMock).toHaveBeenCalledWith('/var/local/emhttp/connectStatus.json');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,11 +1,14 @@
import { Injectable, Logger, OnModuleInit } from '@nestjs/common';
import { Injectable, Logger, OnApplicationBootstrap, OnModuleDestroy } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { OnEvent } from '@nestjs/event-emitter';
import { unlink } from 'fs/promises';
import { writeFile } from 'fs/promises';

import { ConnectionMetadata, ConfigType } from './connect.config.js';
import { ConfigType, ConnectionMetadata } from '../config/connect.config.js';
import { EVENTS } from '../helper/nest-tokens.js';

@Injectable()
export class ConnectStatusWriterService implements OnModuleInit {
export class ConnectStatusWriterService implements OnApplicationBootstrap, OnModuleDestroy {
    constructor(private readonly configService: ConfigService<ConfigType, true>) {}

    private logger = new Logger(ConnectStatusWriterService.name);
@@ -15,30 +18,27 @@ export class ConnectStatusWriterService implements OnModuleInit {
        return '/var/local/emhttp/connectStatus.json';
    }

    async onModuleInit() {
    async onApplicationBootstrap() {
        this.logger.verbose(`Status file path: ${this.statusFilePath}`);

        // Write initial status
        await this.writeStatus();

        // Listen for changes to connection status
        this.configService.changes$.subscribe({
            next: async (change) => {
                const connectionChanged = change.path && change.path.startsWith('connect.mothership');
                if (connectionChanged) {
                    await this.writeStatus();
                }
            },
            error: (err) => {
                this.logger.error('Error receiving config changes:', err);
            },
        });
    }

    async onModuleDestroy() {
        try {
            await unlink(this.statusFilePath);
            this.logger.verbose(`Status file deleted: ${this.statusFilePath}`);
        } catch (error) {
            this.logger.debug(`Could not delete status file: ${error}`);
        }
    }

    @OnEvent(EVENTS.MOTHERSHIP_CONNECTION_STATUS_CHANGED, { async: true })
    private async writeStatus() {
        try {
            const connectionMetadata = this.configService.get<ConnectionMetadata>('connect.mothership');

            // Try to get allowed origins from the store
            let allowedOrigins = '';
            try {
@@ -48,22 +48,22 @@ export class ConnectStatusWriterService implements OnModuleInit {
            } catch (error) {
                this.logger.debug('Could not get allowed origins:', error);
            }

            const statusData = {
                connectionStatus: connectionMetadata?.status || 'PRE_INIT',
                error: connectionMetadata?.error || null,
                lastPing: connectionMetadata?.lastPing || null,
                allowedOrigins: allowedOrigins,
                timestamp: Date.now()
                timestamp: Date.now(),
            };

            const data = JSON.stringify(statusData, null, 2);
            this.logger.verbose(`Writing connection status: ${data}`);

            await writeFile(this.statusFilePath, data);
            this.logger.verbose(`Status written to ${this.statusFilePath}`);
        } catch (error) {
            this.logger.error(error, `Error writing status to '${this.statusFilePath}'`);
        }
    }
}
}
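For reference, the payload this service writes to /var/local/emhttp/connectStatus.json is exactly the object assembled in writeStatus() above, serialized with two-space indentation. The values in this sample are illustrative only, not captured from a real server:

{
  "connectionStatus": "CONNECTED",
  "error": null,
  "lastPing": 1752000000000,
  "allowedOrigins": "",
  "timestamp": 1752000000123
}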
@@ -3,7 +3,6 @@ import { ConfigModule, ConfigService } from '@nestjs/config';

import { ConnectConfigPersister } from './config/config.persistence.js';
import { configFeature } from './config/connect.config.js';
import { ConnectStatusWriterService } from './config/connect-status-writer.service.js';
import { MothershipModule } from './mothership-proxy/mothership.module.js';
import { ConnectModule } from './unraid-connect/connect.module.js';

@@ -11,7 +10,7 @@ export const adapter = 'nestjs';

@Module({
    imports: [ConfigModule.forFeature(configFeature), ConnectModule, MothershipModule],
    providers: [ConnectConfigPersister, ConnectStatusWriterService],
    providers: [ConnectConfigPersister],
    exports: [],
})
class ConnectPluginModule {

@@ -130,11 +130,19 @@ export class MothershipConnectionService implements OnModuleInit, OnModuleDestro
    }

    async onModuleInit() {
        // Crash on startup if these config values are not set initially
        // Warn on startup if these config values are not set initially
        const { unraidVersion, flashGuid, apiVersion } = this.configKeys;
        const warnings: string[] = [];
        [unraidVersion, flashGuid, apiVersion].forEach((key) => {
            this.configService.getOrThrow(key);
            try {
                this.configService.getOrThrow(key);
            } catch (error) {
                warnings.push(`${key} is not set`);
            }
        });
        if (warnings.length > 0) {
            this.logger.warn('Missing config values: %s', warnings.join(', '));
        }
        // Setup IDENTITY_CHANGED & METADATA_CHANGED events
        this.setupIdentitySubscription();
        this.setupMetadataChangedEvent();

@@ -32,7 +32,7 @@ export class MothershipHandler {
        const state = this.connectionService.getConnectionState();
        if (
            state &&
            [MinigraphStatus.PING_FAILURE, MinigraphStatus.ERROR_RETRYING].includes(state.status)
            [MinigraphStatus.PING_FAILURE].includes(state.status)
        ) {
            this.logger.verbose(
                'Mothership connection status changed to %s; setting up mothership subscription',

@@ -3,18 +3,20 @@ import { Module } from '@nestjs/common';
import { ConnectApiKeyService } from '../authn/connect-api-key.service.js';
import { CloudResolver } from '../connection-status/cloud.resolver.js';
import { CloudService } from '../connection-status/cloud.service.js';
import { ConnectStatusWriterService } from '../connection-status/connect-status-writer.service.js';
import { TimeoutCheckerJob } from '../connection-status/timeout-checker.job.js';
import { InternalClientService } from '../internal-rpc/internal.client.js';
import { RemoteAccessModule } from '../remote-access/remote-access.module.js';
import { MothershipConnectionService } from './connection.service.js';
import { MothershipGraphqlClientService } from './graphql.client.js';
import { MothershipSubscriptionHandler } from './mothership-subscription.handler.js';
import { MothershipHandler } from './mothership.events.js';
import { MothershipController } from './mothership.controller.js';
import { MothershipHandler } from './mothership.events.js';

@Module({
    imports: [RemoteAccessModule],
    providers: [
        ConnectStatusWriterService,
        ConnectApiKeyService,
        MothershipConnectionService,
        MothershipGraphqlClientService,

@@ -15,7 +15,7 @@
    "commander": "14.0.0",
    "create-create-app": "7.3.0",
    "fs-extra": "11.3.0",
    "inquirer": "12.6.3",
    "inquirer": "12.7.0",
    "validate-npm-package-name": "6.0.1"
  },
  "devDependencies": {
@@ -25,7 +25,7 @@
    "@nestjs/graphql": "13.1.0",
    "@types/fs-extra": "11.0.4",
    "@types/inquirer": "9.0.8",
    "@types/node": "22.15.32",
    "@types/node": "22.16.4",
    "@types/validate-npm-package-name": "4.0.2",
    "class-transformer": "0.5.1",
    "class-validator": "0.14.2",

@@ -1,81 +1,25 @@
|
||||
import { Logger, Injectable, OnModuleInit } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { existsSync, readFileSync } from "fs";
|
||||
import { writeFile } from "fs/promises";
|
||||
import path from "path";
|
||||
import { bufferTime } from "rxjs/operators";
|
||||
import { Injectable } from "@nestjs/common";
|
||||
import { ConfigFilePersister } from "@unraid/shared/services/config-file.js"; // npm install @unraid/shared
|
||||
import { PluginNameConfig } from "./config.entity.js";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
|
||||
@Injectable()
|
||||
export class PluginNameConfigPersister implements OnModuleInit {
|
||||
constructor(private readonly configService: ConfigService) {}
|
||||
|
||||
private logger = new Logger(PluginNameConfigPersister.name);
|
||||
|
||||
/** the file path to the config file for this plugin */
|
||||
get configPath() {
|
||||
return path.join(
|
||||
this.configService.get("PATHS_CONFIG_MODULES")!,
|
||||
"plugin-name.json" // Use kebab-case for the filename
|
||||
);
|
||||
export class PluginNameConfigPersister extends ConfigFilePersister<PluginNameConfig> {
|
||||
constructor(configService: ConfigService) {
|
||||
super(configService);
|
||||
}
|
||||
|
||||
onModuleInit() {
|
||||
this.logger.debug(`Config path: ${this.configPath}`);
|
||||
// Load the config from the file if it exists, otherwise initialize it with defaults.
|
||||
if (existsSync(this.configPath)) {
|
||||
try {
|
||||
const configFromFile = JSON.parse(
|
||||
readFileSync(this.configPath, "utf8")
|
||||
);
|
||||
this.configService.set("plugin-name", configFromFile);
|
||||
this.logger.verbose(`Config loaded from ${this.configPath}`);
|
||||
} catch (error) {
|
||||
this.logger.error(
|
||||
`Error reading or parsing config file at ${this.configPath}. Using defaults.`,
|
||||
error
|
||||
);
|
||||
// If loading fails, ensure default config is set and persisted
|
||||
this.persist();
|
||||
}
|
||||
} else {
|
||||
this.logger.log(
|
||||
`Config file ${this.configPath} does not exist. Writing default config...`
|
||||
);
|
||||
// Persist the default configuration provided by configFeature
|
||||
this.persist();
|
||||
}
|
||||
|
||||
// Automatically persist changes to the config file after a short delay.
|
||||
this.configService.changes$.pipe(bufferTime(25)).subscribe({
|
||||
next: async (changes) => {
|
||||
const pluginNameConfigChanged = changes.some(({ path }) =>
|
||||
path.startsWith("plugin-name.")
|
||||
);
|
||||
if (pluginNameConfigChanged) {
|
||||
this.logger.verbose("Plugin config changed");
|
||||
await this.persist();
|
||||
}
|
||||
},
|
||||
error: (err) => {
|
||||
this.logger.error("Error receiving config changes:", err);
|
||||
},
|
||||
});
|
||||
fileName(): string {
|
||||
return "plugin-name.json"; // Use kebab-case for the filename
|
||||
}
|
||||
|
||||
async persist(
|
||||
config = this.configService.get<PluginNameConfig>("plugin-name")
|
||||
) {
|
||||
const data = JSON.stringify(config, null, 2);
|
||||
this.logger.verbose(`Persisting config to ${this.configPath}: ${data}`);
|
||||
try {
|
||||
await writeFile(this.configPath, data);
|
||||
this.logger.verbose(`Config change persisted to ${this.configPath}`);
|
||||
} catch (error) {
|
||||
this.logger.error(
|
||||
`Error persisting config to '${this.configPath}':`,
|
||||
error
|
||||
);
|
||||
}
|
||||
configKey(): string {
|
||||
return "plugin-name";
|
||||
}
|
||||
|
||||
defaultConfig(): PluginNameConfig {
|
||||
// Return the default configuration for your plugin
|
||||
// This should match the structure defined in your config.entity.ts
|
||||
return {} as PluginNameConfig;
|
||||
}
|
||||
}
|
||||
|
||||
packages/unraid-shared/justfile (new file, 9 additions)
@@ -0,0 +1,9 @@
# Justfile for unraid-shared

# Default recipe to run when just is called without arguments
default:
    @just --list

# Watch for changes in src files and run clean + build
watch:
    watchexec -r -e ts,tsx -w src -- pnpm build
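Usage note for this new justfile (assuming just and watchexec are installed and you are inside packages/unraid-shared):

just          # default recipe: lists the available recipes
just watch    # reruns pnpm build whenever a .ts/.tsx file under src changes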
@@ -31,9 +31,9 @@
    "@jsonforms/core": "3.6.0",
    "@nestjs/common": "11.1.3",
    "@nestjs/graphql": "13.1.0",
    "@types/bun": "1.2.16",
    "@types/bun": "1.2.18",
    "@types/lodash-es": "4.17.12",
    "@types/node": "22.15.32",
    "@types/node": "22.16.4",
    "class-validator": "0.14.2",
    "graphql": "16.11.0",
    "graphql-scalars": "1.24.2",
@@ -47,11 +47,13 @@
    "@graphql-tools/utils": "10.8.6",
    "@jsonforms/core": "3.6.0",
    "@nestjs/common": "11.1.3",
    "@nestjs/config": "4.0.2",
    "@nestjs/graphql": "13.1.0",
    "class-validator": "0.14.2",
    "graphql": "16.11.0",
    "graphql-scalars": "1.24.2",
    "lodash-es": "4.17.21",
    "nest-authz": "2.17.0"
    "nest-authz": "2.17.0",
    "rxjs": "7.8.2"
  }
}
@@ -0,0 +1,495 @@
|
||||
import { expect, test, describe, beforeEach, afterEach } from "bun:test";
|
||||
import { Subject } from "rxjs";
|
||||
import { readFile, writeFile, mkdir, rm } from "node:fs/promises";
|
||||
import { join } from "node:path";
|
||||
import { tmpdir } from "node:os";
|
||||
import { ConfigFilePersister } from "../config-file.js";
|
||||
|
||||
/**
|
||||
* TEST SCOPE: ConfigFilePersister NestJS Integration
|
||||
*
|
||||
* BEHAVIORS TESTED:
|
||||
* • NestJS lifecycle integration (OnModuleInit, OnModuleDestroy)
|
||||
* • Reactive config change subscription with 25ms buffering
|
||||
* • ConfigService integration for path resolution and config storage
|
||||
* • Automatic config loading with migration priority over defaults
|
||||
* • Config change detection and selective persistence (matching configKey only)
|
||||
* • Graceful error handling for all failure scenarios
|
||||
* • Flash drive optimization through change detection
|
||||
* • Standalone file access via getFileHandler() delegation
|
||||
* • Proper cleanup of subscriptions and final state persistence
|
||||
*
|
||||
* INTEGRATION SCENARIOS:
|
||||
* ✓ Module initialization with existing/missing/invalid config files
|
||||
* ✓ Reactive config change processing with proper filtering
|
||||
* ✓ Module destruction with subscription cleanup and final persistence
|
||||
* ✓ Error resilience (file system errors, validation failures, service errors)
|
||||
* ✓ Migration vs defaults priority during initialization
|
||||
* ✓ Full application lifecycle from startup to shutdown
|
||||
*
|
||||
* COVERAGE FOCUS:
|
||||
* • NestJS framework integration correctness
|
||||
* • Reactive configuration management
|
||||
* • Production-like error scenarios
|
||||
* • Memory leak prevention (subscription management)
|
||||
* • Data persistence guarantees during shutdown
|
||||
*
|
||||
* NOT TESTED (covered in other files):
|
||||
* • Low-level file operations (ConfigFileHandler)
|
||||
* • Abstract class behavior (ConfigDefinition)
|
||||
*/
|
||||
|
||||
interface TestConfig {
|
||||
name: string;
|
||||
version: number;
|
||||
enabled: boolean;
|
||||
settings: {
|
||||
timeout: number;
|
||||
retries: number;
|
||||
};
|
||||
}
|
||||
|
||||
class TestConfigFilePersister extends ConfigFilePersister<TestConfig> {
|
||||
constructor(configService: any) {
|
||||
super(configService);
|
||||
}
|
||||
|
||||
fileName(): string {
|
||||
return "test-config.json";
|
||||
}
|
||||
|
||||
configKey(): string {
|
||||
return "testConfig";
|
||||
}
|
||||
|
||||
defaultConfig(): TestConfig {
|
||||
return {
|
||||
name: "test",
|
||||
version: 1,
|
||||
enabled: false,
|
||||
settings: {
|
||||
timeout: 5000,
|
||||
retries: 3,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async validate(config: object): Promise<TestConfig> {
|
||||
const testConfig = config as TestConfig;
|
||||
if (testConfig.version < 1) {
|
||||
throw new Error("Invalid version: must be >= 1");
|
||||
}
|
||||
if (testConfig.settings.timeout < 1000) {
|
||||
throw new Error("Invalid timeout: must be >= 1000");
|
||||
}
|
||||
return testConfig;
|
||||
}
|
||||
|
||||
async migrateConfig(): Promise<TestConfig> {
|
||||
return {
|
||||
name: "migrated",
|
||||
version: 2,
|
||||
enabled: true,
|
||||
settings: {
|
||||
timeout: 3000,
|
||||
retries: 5,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
describe("ConfigFilePersister Integration Tests", () => {
|
||||
let configService: any;
|
||||
let persister: TestConfigFilePersister;
|
||||
let testDir: string;
|
||||
let configPath: string;
|
||||
let changesSubject: Subject<any>;
|
||||
let configStore: Record<string, any>;
|
||||
|
||||
beforeEach(async () => {
|
||||
// Setup test directory
|
||||
testDir = join(tmpdir(), `config-test-${Date.now()}`);
|
||||
await mkdir(testDir, { recursive: true });
|
||||
configPath = join(testDir, "test-config.json");
|
||||
|
||||
// Setup config store
|
||||
configStore = {};
|
||||
|
||||
// Setup rxjs subject for config changes
|
||||
changesSubject = new Subject();
|
||||
|
||||
// Mock ConfigService
|
||||
configService = {
|
||||
get: (key: string) => configStore[key],
|
||||
set: (key: string, value: any) => {
|
||||
configStore[key] = value;
|
||||
},
|
||||
getOrThrow: (key: string) => {
|
||||
if (key === "PATHS_CONFIG_MODULES") return testDir;
|
||||
throw new Error(`Config key ${key} not found`);
|
||||
},
|
||||
changes$: changesSubject.asObservable(),
|
||||
};
|
||||
|
||||
persister = new TestConfigFilePersister(configService);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
// Proper cleanup
|
||||
changesSubject.complete();
|
||||
await persister.onModuleDestroy?.();
|
||||
await rm(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
test("configPath returns correct path", () => {
|
||||
expect(persister.configPath()).toBe(configPath);
|
||||
});
|
||||
|
||||
test("loads existing config from file", async () => {
|
||||
const existingConfig = {
|
||||
name: "existing",
|
||||
version: 2,
|
||||
enabled: true,
|
||||
settings: {
|
||||
timeout: 3000,
|
||||
retries: 5,
|
||||
},
|
||||
};
|
||||
await writeFile(configPath, JSON.stringify(existingConfig, null, 2));
|
||||
|
||||
await persister.onModuleInit();
|
||||
|
||||
// Should load existing config
|
||||
expect(configStore.testConfig).toEqual(existingConfig);
|
||||
});
|
||||
|
||||
test("handles invalid config by attempting migration", async () => {
|
||||
const invalidConfig = {
|
||||
name: "invalid",
|
||||
version: 0, // Invalid version
|
||||
enabled: true,
|
||||
settings: {
|
||||
timeout: 500, // Invalid timeout
|
||||
retries: 5,
|
||||
},
|
||||
};
|
||||
await writeFile(configPath, JSON.stringify(invalidConfig, null, 2));
|
||||
|
||||
await persister.onModuleInit();
|
||||
|
||||
// Should call migrate and set migrated config
|
||||
expect(configStore.testConfig).toEqual({
|
||||
name: "migrated",
|
||||
version: 2,
|
||||
enabled: true,
|
||||
settings: {
|
||||
timeout: 3000,
|
||||
retries: 5,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test("persists config to file", async () => {
|
||||
const config = {
|
||||
name: "persist-test",
|
||||
version: 2,
|
||||
enabled: true,
|
||||
settings: {
|
||||
timeout: 4000,
|
||||
retries: 4,
|
||||
},
|
||||
};
|
||||
|
||||
const result = await persister.persist(config);
|
||||
|
||||
expect(result).toBe(true);
|
||||
const fileContent = await readFile(configPath, "utf8");
|
||||
const parsedConfig = JSON.parse(fileContent);
|
||||
expect(parsedConfig).toEqual(config);
|
||||
});
|
||||
|
||||
test("skips persistence when config is unchanged", async () => {
|
||||
const config = {
|
||||
name: "unchanged",
|
||||
version: 1,
|
||||
enabled: false,
|
||||
settings: {
|
||||
timeout: 5000,
|
||||
retries: 3,
|
||||
},
|
||||
};
|
||||
|
||||
// Write initial config
|
||||
await writeFile(configPath, JSON.stringify(config, null, 2));
|
||||
|
||||
const result = await persister.persist(config);
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
test("loads and validates config from file", async () => {
|
||||
const config = {
|
||||
name: "file-test",
|
||||
version: 3,
|
||||
enabled: true,
|
||||
settings: {
|
||||
timeout: 2000,
|
||||
retries: 1,
|
||||
},
|
||||
};
|
||||
await writeFile(configPath, JSON.stringify(config));
|
||||
|
||||
const result = await persister.getFileHandler().readConfigFile();
|
||||
|
||||
expect(result).toEqual(config);
|
||||
});
|
||||
|
||||
test("throws error when file doesn't exist", async () => {
|
||||
await expect(persister.getFileHandler().readConfigFile()).rejects.toThrow(
|
||||
"Config file does not exist"
|
||||
);
|
||||
});
|
||||
|
||||
test("throws error when file contains invalid JSON", async () => {
|
||||
await writeFile(configPath, "{ invalid json");
|
||||
|
||||
await expect(persister.getFileHandler().readConfigFile()).rejects.toThrow();
|
||||
});
|
||||
|
||||
test("throws error when config is invalid", async () => {
|
||||
const invalidConfig = {
|
||||
name: "invalid",
|
||||
version: -1,
|
||||
enabled: true,
|
||||
settings: {
|
||||
timeout: 100,
|
||||
retries: 1,
|
||||
},
|
||||
};
|
||||
await writeFile(configPath, JSON.stringify(invalidConfig));
|
||||
|
||||
await expect(persister.getFileHandler().readConfigFile()).rejects.toThrow(
|
||||
"Invalid version"
|
||||
);
|
||||
});
|
||||
|
||||
test("base class migration throws not implemented error", async () => {
|
||||
const basePersister = new (class extends ConfigFilePersister<TestConfig> {
|
||||
fileName() {
|
||||
return "base-test.json";
|
||||
}
|
||||
configKey() {
|
||||
return "baseTest";
|
||||
}
|
||||
defaultConfig() {
|
||||
return persister.defaultConfig();
|
||||
}
|
||||
})(configService);
|
||||
|
||||
await expect(basePersister.migrateConfig()).rejects.toThrow(
|
||||
"Not implemented"
|
||||
);
|
||||
});
|
||||
|
||||
test("unsubscribes from config changes and persists final state", async () => {
|
||||
await persister.onModuleInit();
|
||||
|
||||
// Setup final config state
|
||||
configStore["testConfig"] = {
|
||||
name: "final",
|
||||
version: 4,
|
||||
enabled: false,
|
||||
settings: {
|
||||
timeout: 1000,
|
||||
retries: 10,
|
||||
},
|
||||
};
|
||||
|
||||
await persister.onModuleDestroy();
|
||||
|
||||
// Should persist final state
|
||||
const fileContent = await readFile(configPath, "utf8");
|
||||
const parsedConfig = JSON.parse(fileContent);
|
||||
expect(parsedConfig.name).toBe("final");
|
||||
});
|
||||
|
||||
test("handles destroy when not initialized", async () => {
|
||||
// Should not throw error
|
||||
await expect(persister.onModuleDestroy()).resolves.toBeUndefined();
|
||||
});
|
||||
|
||||
test("config change subscription is properly set up", async () => {
|
||||
// Pre-create config file to avoid migration
|
||||
const initialConfig = persister.defaultConfig();
|
||||
await writeFile(configPath, JSON.stringify(initialConfig, null, 2));
|
||||
|
||||
await persister.onModuleInit();
|
||||
|
||||
// Verify that the config observer is active by checking internal state
|
||||
// This tests that the subscription was created without relying on timing
|
||||
expect((persister as any).configObserver).toBeDefined();
|
||||
expect((persister as any).configObserver.closed).toBe(false);
|
||||
|
||||
// Test that non-matching changes are ignored (synchronous test)
|
||||
configStore["testConfig"] = persister.defaultConfig();
|
||||
const initialFileContent = await readFile(configPath, "utf8");
|
||||
|
||||
// Emit a non-matching config change
|
||||
changesSubject.next([{ path: "otherConfig.setting" }]);
|
||||
|
||||
// Wait briefly to ensure no processing occurs
|
||||
await new Promise((resolve) => setTimeout(resolve, 30));
|
||||
|
||||
// File should remain unchanged
|
||||
const afterFileContent = await readFile(configPath, "utf8");
|
||||
expect(afterFileContent).toBe(initialFileContent);
|
||||
});
|
||||
|
||||
test("ignores non-matching config changes", async () => {
|
||||
// Pre-create config file
|
||||
const initialConfig = persister.defaultConfig();
|
||||
await writeFile(configPath, JSON.stringify(initialConfig, null, 2));
|
||||
|
||||
await persister.onModuleInit();
|
||||
|
||||
// Set initial config and write to file
|
||||
configStore["testConfig"] = persister.defaultConfig();
|
||||
|
||||
// Get initial modification time
|
||||
const stats1 = await import("fs/promises").then((fs) =>
|
||||
fs.stat(configPath)
|
||||
);
|
||||
|
||||
// Wait a bit to ensure timestamp difference
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
|
||||
// Emit change for different config key
|
||||
changesSubject.next([{ path: "otherConfig.setting" }]);
|
||||
|
||||
// Wait for buffer time
|
||||
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||
|
||||
// File should remain unchanged (same modification time)
|
||||
const stats2 = await import("fs/promises").then((fs) =>
|
||||
fs.stat(configPath)
|
||||
);
|
||||
expect(stats2.mtime).toEqual(stats1.mtime);
|
||||
});
|
||||
|
||||
test("handles config service errors gracefully", async () => {
|
||||
// Mock config service to throw error on get
|
||||
const errorConfigService = {
|
||||
...configService,
|
||||
get: () => {
|
||||
throw new Error("Config service error");
|
||||
},
|
||||
};
|
||||
|
||||
const errorPersister = new TestConfigFilePersister(errorConfigService);
|
||||
|
||||
// Should still initialize (migration will be called due to no file)
|
||||
await errorPersister.onModuleInit();
|
||||
|
||||
// Should have migrated config since get failed
|
||||
const expectedMigrated = await errorPersister.migrateConfig();
|
||||
expect(configStore.testConfig).toEqual(expectedMigrated);
|
||||
});
|
||||
|
||||
test("handles persistence errors gracefully", async () => {
|
||||
await persister.onModuleInit();
|
||||
|
||||
// Create a persister that points to invalid directory
|
||||
const invalidPersister = new TestConfigFilePersister({
|
||||
...configService,
|
||||
getOrThrow: (key: string) => {
|
||||
if (key === "PATHS_CONFIG_MODULES")
|
||||
return "/invalid/path/that/does/not/exist";
|
||||
throw new Error(`Config key ${key} not found`);
|
||||
},
|
||||
});
|
||||
|
||||
const config = { ...persister.defaultConfig(), name: "error-test" };
|
||||
|
||||
// Should not throw despite write error
|
||||
const result = await invalidPersister.persist(config);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
test("migration priority over defaults when file doesn't exist", async () => {
|
||||
// No file exists, should trigger migration path
|
||||
await persister.onModuleInit();
|
||||
|
||||
// ConfigFilePersister prioritizes migration over defaults when file doesn't exist
|
||||
expect(configStore.testConfig).toEqual({
|
||||
name: "migrated",
|
||||
version: 2,
|
||||
enabled: true,
|
||||
settings: {
|
||||
timeout: 3000,
|
||||
retries: 5,
|
||||
},
|
||||
});
|
||||
|
||||
// Should persist migrated config to file
|
||||
const fileContent = await readFile(configPath, "utf8");
|
||||
const parsedConfig = JSON.parse(fileContent);
|
||||
expect(parsedConfig).toEqual({
|
||||
name: "migrated",
|
||||
version: 2,
|
||||
enabled: true,
|
||||
settings: {
|
||||
timeout: 3000,
|
||||
retries: 5,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test("full lifecycle integration", async () => {
|
||||
// Initialize - will use migration since no file exists
|
||||
await persister.onModuleInit();
|
||||
|
||||
// Verify initial state (migrated, not defaults)
|
||||
expect(configStore.testConfig).toEqual({
|
||||
name: "migrated",
|
||||
version: 2,
|
||||
enabled: true,
|
||||
settings: {
|
||||
timeout: 3000,
|
||||
retries: 5,
|
||||
},
|
||||
});
|
||||
|
||||
// Simulate config change
|
||||
configStore["testConfig"] = {
|
||||
name: "lifecycle-test",
|
||||
version: 5,
|
||||
enabled: true,
|
||||
settings: {
|
||||
timeout: 1500,
|
||||
retries: 7,
|
||||
},
|
||||
};
|
||||
|
||||
// Trigger change notification
|
||||
changesSubject.next([{ path: "testConfig.enabled" }]);
|
||||
|
||||
// Wait for persistence
|
||||
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||
|
||||
// Cleanup
|
||||
await persister.onModuleDestroy();
|
||||
|
||||
// Verify final persisted state
|
||||
const fileContent = await readFile(configPath, "utf8");
|
||||
const parsedConfig = JSON.parse(fileContent);
|
||||
expect(parsedConfig).toEqual({
|
||||
name: "lifecycle-test",
|
||||
version: 5,
|
||||
enabled: true,
|
||||
settings: {
|
||||
timeout: 1500,
|
||||
retries: 7,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
packages/unraid-shared/src/services/config-file.ts (new file, 147 additions)
@@ -0,0 +1,147 @@
import {
    Logger,
    type OnModuleDestroy,
    type OnModuleInit,
} from "@nestjs/common";
import type { ConfigService } from "@nestjs/config";
import path from "node:path";

import { bufferTime } from "rxjs/operators";
import type { Subscription } from "rxjs";
import { ConfigFileHandler } from "../util/config-file-handler.js";
import { ConfigDefinition } from "../util/config-definition.js";

/**
 * Abstract base class for persisting configuration objects to JSON files.
 *
 * Provides NestJS integration with reactive config updates, standalone file operations,
 * and lifecycle management with automatic persistence.
 *
 * @template T The configuration object type that extends object
 *
 * @example
 * ```typescript
 * @Injectable()
 * class MyConfigPersister extends ConfigFilePersister<MyConfig> {
 *   constructor(configService: ConfigService) {
 *     super(configService);
 *   }
 *
 *   fileName() { return "my-config.json"; }
 *   configKey() { return "myConfig"; }
 *   defaultConfig(): MyConfig {
 *     return { enabled: false, timeout: 5000 };
 *   }
 * }
 * ```
 */
export abstract class ConfigFilePersister<T extends object>
    extends ConfigDefinition<T>
    implements OnModuleInit, OnModuleDestroy
{
    private configObserver?: Subscription;
    private fileHandler: ConfigFileHandler<T>;

    /**
     * Creates a new ConfigFilePersister instance.
     *
     * @param configService The NestJS ConfigService instance for reactive config management
     */
    constructor(protected readonly configService: ConfigService) {
        super();
        this.logger = new Logger(`ConfigFilePersister:${this.fileName()}`);
        this.fileHandler = new ConfigFileHandler(this);
    }

    /**
     * Returns the configuration key used in the ConfigService.
     *
     * This key is used to:
     * - Store/retrieve config from the ConfigService
     * - Filter config change events to only process relevant changes
     * - Namespace configuration to avoid conflicts
     *
     * @returns The config key string (e.g., "userPreferences", "apiSettings")
     * @example "myModuleConfig"
     */
    abstract configKey(): string;

    /**
     * Returns the absolute path to the configuration file.
     * Combines `PATHS_CONFIG_MODULES` environment variable with the filename.
     *
     * @throws Error if `PATHS_CONFIG_MODULES` environment variable is not set
     */
    configPath(): string {
        return path.join(
            this.configService.getOrThrow("PATHS_CONFIG_MODULES"),
            this.fileName()
        );
    }

    /**
     * Returns a standalone ConfigFileHandler for direct file operations outside NestJS.
     */
    getFileHandler(): ConfigFileHandler<T> {
        return this.fileHandler;
    }

    /**
     * NestJS lifecycle hook for cleanup.
     * Unsubscribes from config changes and persists final state.
     */
    async onModuleDestroy() {
        this.configObserver?.unsubscribe();
        await this.persist();
    }

    /**
     * NestJS lifecycle hook for initialization.
     * Loads config from disk and sets up reactive change subscription.
     */
    async onModuleInit() {
        this.logger.verbose(`Config path: ${this.configPath()}`);
        await this.loadOrMigrateConfig();

        this.configObserver = this.configService.changes$
            .pipe(bufferTime(25))
            .subscribe({
                next: async (changes) => {
                    const configChanged = changes.some(({ path }) =>
                        path?.startsWith(this.configKey())
                    );
                    if (configChanged) {
                        await this.persist();
                    }
                },
                error: (err) => {
                    this.logger.error("Error receiving config changes:", err);
                },
            });
    }

    /**
     * Persists configuration to disk with change detection optimization.
     *
     * @param config - The config object to persist (defaults to current config from service)
     * @returns `true` if persisted to disk, `false` if skipped or failed
     */
    async persist(
        config = this.configService.get(this.configKey())
    ): Promise<boolean> {
        if (!config) {
            this.logger.warn(`Cannot persist undefined config`);
            return false;
        }
        return await this.fileHandler.writeConfigFile(config);
    }

    /**
     * Load or migrate configuration and set it in ConfigService.
     */
    private async loadOrMigrateConfig() {
        const config = await this.fileHandler.loadConfig();
        this.configService.set(this.configKey(), config);
        return this.persist(config);
    }
}
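For orientation, here is a minimal sketch of how a feature module might adopt `ConfigFilePersister`. The `NotificationsConfig` interface, file name, config key, import path, and module wiring are illustrative assumptions, not code from this changeset; the sketch only relies on the same `ConfigService` (with `changes$`) that the base class above already uses.

```typescript
// Hypothetical subclass for illustration — names and the import specifier are assumptions.
import { Injectable, Module } from "@nestjs/common";
import { ConfigService } from "@nestjs/config";
import { ConfigFilePersister } from "@unraid/shared/services/config-file.js";

interface NotificationsConfig {
    enabled: boolean;
    pollIntervalMs: number;
}

@Injectable()
class NotificationsConfigPersister extends ConfigFilePersister<NotificationsConfig> {
    constructor(configService: ConfigService) {
        super(configService);
    }

    fileName() {
        return "notifications.json"; // stored under PATHS_CONFIG_MODULES
    }

    configKey() {
        return "notifications"; // namespace inside the ConfigService
    }

    defaultConfig(): NotificationsConfig {
        return { enabled: true, pollIntervalMs: 30_000 };
    }
}

// Registering the persister as a provider is enough: onModuleInit() loads the
// file into the ConfigService, and buffered changes$ events are written back.
@Module({ providers: [NotificationsConfigPersister] })
class NotificationsModule {}
```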
@@ -0,0 +1,192 @@
|
||||
import { expect, test, describe, beforeEach } from "bun:test";
|
||||
import { join } from "node:path";
|
||||
import { tmpdir } from "node:os";
|
||||
import { ConfigDefinition } from "../config-definition.js";
|
||||
|
||||
/**
|
||||
* TEST SCOPE: ConfigDefinition Abstract Base Class
|
||||
*
|
||||
* BEHAVIORS TESTED:
|
||||
* • Core abstract method implementations (fileName, configPath, defaultConfig)
|
||||
* • Default validation behavior (passthrough without transformation)
|
||||
* • Custom validation with data transformation and error throwing
|
||||
* • Default migration behavior (throws "Not implemented" error)
|
||||
* • Custom migration implementation with success and failure scenarios
|
||||
* • Error propagation from validation and migration methods
|
||||
*
|
||||
* COVERAGE FOCUS:
|
||||
* • Abstract class contract enforcement
|
||||
* • Extension point behavior (validate, migrate methods)
|
||||
* • Error handling patterns for implementors
|
||||
* • Type safety and configuration structure validation
|
||||
*
|
||||
* NOT TESTED (covered in other files):
|
||||
* • File I/O operations (ConfigFileHandler)
|
||||
* • NestJS integration (ConfigFilePersister)
|
||||
* • Reactive config changes
|
||||
*/
|
||||
|
||||
interface TestConfig {
|
||||
name: string;
|
||||
version: number;
|
||||
enabled: boolean;
|
||||
timeout: number;
|
||||
}
|
||||
|
||||
class TestConfigDefinition extends ConfigDefinition<TestConfig> {
|
||||
constructor(private configDir: string, loggerName?: string) {
|
||||
super(loggerName);
|
||||
}
|
||||
|
||||
fileName(): string {
|
||||
return "test-config.json";
|
||||
}
|
||||
|
||||
configPath(): string {
|
||||
return join(this.configDir, this.fileName());
|
||||
}
|
||||
|
||||
defaultConfig(): TestConfig {
|
||||
return {
|
||||
name: "test",
|
||||
version: 1,
|
||||
enabled: false,
|
||||
timeout: 5000,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
class ValidatingConfigDefinition extends TestConfigDefinition {
|
||||
async validate(config: object): Promise<TestConfig> {
|
||||
const testConfig = config as TestConfig;
|
||||
|
||||
if (typeof testConfig.name !== "string" || testConfig.name.length === 0) {
|
||||
throw new Error("Name must be a non-empty string");
|
||||
}
|
||||
|
||||
if (typeof testConfig.version !== "number" || testConfig.version < 1) {
|
||||
throw new Error("Version must be a number >= 1");
|
||||
}
|
||||
|
||||
if (typeof testConfig.timeout !== "number" || testConfig.timeout < 1000) {
|
||||
throw new Error("Timeout must be a number >= 1000");
|
||||
}
|
||||
|
||||
// Test data transformation
|
||||
return {
|
||||
...testConfig,
|
||||
name: testConfig.name.trim(),
|
||||
timeout: Math.max(testConfig.timeout, 1000),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
class MigratingConfigDefinition extends TestConfigDefinition {
|
||||
public migrationShouldFail = false;
|
||||
public migrationCallCount = 0;
|
||||
|
||||
async migrateConfig(): Promise<TestConfig> {
|
||||
this.migrationCallCount++;
|
||||
|
||||
if (this.migrationShouldFail) {
|
||||
throw new Error("Migration failed");
|
||||
}
|
||||
|
||||
return {
|
||||
name: "migrated",
|
||||
version: 2,
|
||||
enabled: true,
|
||||
timeout: 3000,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
describe("ConfigDefinition", () => {
|
||||
let testDir: string;
|
||||
let configDefinition: TestConfigDefinition;
|
||||
|
||||
beforeEach(() => {
|
||||
testDir = join(tmpdir(), `config-def-test-${Date.now()}`);
|
||||
configDefinition = new TestConfigDefinition(testDir);
|
||||
});
|
||||
|
||||
describe("Core Functionality", () => {
|
||||
test("abstract methods are implemented correctly", () => {
|
||||
expect(configDefinition.fileName()).toBe("test-config.json");
|
||||
expect(configDefinition.configPath()).toBe(
|
||||
join(testDir, "test-config.json")
|
||||
);
|
||||
expect(configDefinition.defaultConfig()).toEqual({
|
||||
name: "test",
|
||||
version: 1,
|
||||
enabled: false,
|
||||
timeout: 5000,
|
||||
});
|
||||
});
|
||||
|
||||
test("default validation is passthrough", async () => {
|
||||
const config = { name: "test", version: 2, enabled: true, timeout: 3000 };
|
||||
const result = await configDefinition.validate(config);
|
||||
expect(result).toEqual(config);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Validation Behavior", () => {
|
||||
test("validation can transform and validate config", async () => {
|
||||
const validatingDefinition = new ValidatingConfigDefinition(testDir);
|
||||
const config = {
|
||||
name: " test-name ", // Should be trimmed
|
||||
version: 2,
|
||||
enabled: true,
|
||||
timeout: 1500,
|
||||
};
|
||||
|
||||
const result = await validatingDefinition.validate(config);
|
||||
expect(result.name).toBe("test-name"); // Trimmed
|
||||
expect(result.timeout).toBe(1500);
|
||||
});
|
||||
|
||||
test("validation errors are thrown for invalid configs", async () => {
|
||||
const validatingDefinition = new ValidatingConfigDefinition(testDir);
|
||||
const invalidConfig = {
|
||||
name: "",
|
||||
version: 0,
|
||||
enabled: false,
|
||||
timeout: 500,
|
||||
};
|
||||
|
||||
await expect(
|
||||
validatingDefinition.validate(invalidConfig)
|
||||
).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("Migration Behavior", () => {
|
||||
test("default migration throws not implemented error", async () => {
|
||||
await expect(configDefinition.migrateConfig()).rejects.toThrow(
|
||||
"Not implemented"
|
||||
);
|
||||
});
|
||||
|
||||
test("custom migration works when implemented", async () => {
|
||||
const migratingDefinition = new MigratingConfigDefinition(testDir);
|
||||
const result = await migratingDefinition.migrateConfig();
|
||||
|
||||
expect(result).toEqual({
|
||||
name: "migrated",
|
||||
version: 2,
|
||||
enabled: true,
|
||||
timeout: 3000,
|
||||
});
|
||||
});
|
||||
|
||||
test("migration failures are propagated as errors", async () => {
|
||||
const migratingDefinition = new MigratingConfigDefinition(testDir);
|
||||
migratingDefinition.migrationShouldFail = true;
|
||||
|
||||
await expect(migratingDefinition.migrateConfig()).rejects.toThrow(
|
||||
"Migration failed"
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,466 @@
|
||||
import { expect, test, describe, beforeEach, afterEach } from "bun:test";
|
||||
import { readFile, writeFile, mkdir, rm } from "node:fs/promises";
|
||||
import { join } from "node:path";
|
||||
import { tmpdir } from "node:os";
|
||||
import { ConfigFileHandler } from "../config-file-handler.js";
|
||||
import { ConfigDefinition } from "../config-definition.js";
|
||||
|
||||
/**
|
||||
* TEST SCOPE: ConfigFileHandler Standalone File Operations
|
||||
*
|
||||
* BEHAVIORS TESTED:
|
||||
* • Configuration loading with error recovery cascade:
|
||||
* - File exists & valid → load directly
|
||||
* - File read fails → attempt migration → fallback to defaults
|
||||
* - File valid but merged config fails validation → attempt migration
|
||||
* - Migration succeeds but merged result fails validation → fallback to defaults
|
||||
* - Migration fails → fallback to defaults
|
||||
* • File I/O operations (read, write) with validation
|
||||
* • Flash drive optimization (skip writes when config unchanged)
|
||||
* • Partial config updates with deep merging
|
||||
* • Error resilience (invalid JSON, validation failures, file system errors)
|
||||
* • End-to-end workflows (load → update → reload cycles)
|
||||
*
|
||||
* CRITICAL ERROR RECOVERY PATHS:
|
||||
* ✓ read failed → migration failed → defaults written
|
||||
* ✓ read failed → migration succeeded but combo validation failed → defaults written
|
||||
* ✓ read succeeded but merged validation failed → migration → recovery
|
||||
*
|
||||
* COVERAGE FOCUS:
|
||||
* • Data integrity during all error scenarios
|
||||
* • Performance optimization (change detection)
|
||||
* • Configuration persistence reliability
|
||||
* • Validation error handling at all stages
|
||||
*
|
||||
* NOT TESTED (covered in other files):
|
||||
* • NestJS integration and reactive changes (ConfigFilePersister)
|
||||
* • Abstract class behavior (ConfigDefinition)
|
||||
*/
|
||||
|
||||
interface TestConfig {
|
||||
name: string;
|
||||
version: number;
|
||||
enabled: boolean;
|
||||
timeout: number;
|
||||
maxRetries?: number; // Optional field for testing merge validation
|
||||
}
|
||||
|
||||
class TestConfigDefinition extends ConfigDefinition<TestConfig> {
|
||||
public migrationCallCount = 0;
|
||||
public migrationShouldFail = false;
|
||||
public validationShouldFail = false;
|
||||
public mergeValidationShouldFail = false; // New flag for the edge case
|
||||
|
||||
constructor(private configDir: string) {
|
||||
super("TestConfigDefinition");
|
||||
}
|
||||
|
||||
fileName(): string {
|
||||
return "test-config.json";
|
||||
}
|
||||
|
||||
configPath(): string {
|
||||
return join(this.configDir, this.fileName());
|
||||
}
|
||||
|
||||
defaultConfig(): TestConfig {
|
||||
return {
|
||||
name: "test",
|
||||
version: 1,
|
||||
enabled: false,
|
||||
timeout: 5000,
|
||||
maxRetries: 3, // Default includes maxRetries
|
||||
};
|
||||
}
|
||||
|
||||
async validate(config: object): Promise<TestConfig> {
|
||||
if (this.validationShouldFail) {
|
||||
throw new Error("Validation failed");
|
||||
}
|
||||
|
||||
const testConfig = config as TestConfig;
|
||||
|
||||
// Basic validation
|
||||
if (typeof testConfig.version !== "number" || testConfig.version < 1) {
|
||||
throw new Error("Invalid version: must be >= 1");
|
||||
}
|
||||
|
||||
if (typeof testConfig.timeout !== "number" || testConfig.timeout < 1000) {
|
||||
throw new Error("Invalid timeout: must be >= 1000");
|
||||
}
|
||||
|
||||
// Critical edge case: maxRetries validation that could fail after merge
|
||||
if (testConfig.maxRetries !== undefined && testConfig.maxRetries < 0) {
|
||||
throw new Error("Invalid maxRetries: must be >= 0");
|
||||
}
|
||||
|
||||
// Simulate a validation that fails specifically for merged configs
|
||||
if (this.mergeValidationShouldFail && testConfig.maxRetries === -1) {
|
||||
throw new Error("Merged validation failed: maxRetries cannot be -1");
|
||||
}
|
||||
|
||||
return testConfig;
|
||||
}
|
||||
|
||||
async migrateConfig(): Promise<TestConfig> {
|
||||
this.migrationCallCount++;
|
||||
|
||||
if (this.migrationShouldFail) {
|
||||
throw new Error("Migration failed");
|
||||
}
|
||||
|
||||
return {
|
||||
name: "migrated",
|
||||
version: 2,
|
||||
enabled: true,
|
||||
timeout: 3000,
|
||||
maxRetries: 5,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
describe("ConfigFileHandler", () => {
|
||||
let testDir: string;
|
||||
let configPath: string;
|
||||
let configDefinition: TestConfigDefinition;
|
||||
let fileHandler: ConfigFileHandler<TestConfig>;
|
||||
|
||||
beforeEach(async () => {
|
||||
testDir = join(tmpdir(), `config-handler-test-${Date.now()}`);
|
||||
await mkdir(testDir, { recursive: true });
|
||||
configPath = join(testDir, "test-config.json");
|
||||
|
||||
configDefinition = new TestConfigDefinition(testDir);
|
||||
fileHandler = new ConfigFileHandler(configDefinition);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await rm(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
describe("Critical loadConfig Scenarios", () => {
|
||||
test("loads valid config from file successfully", async () => {
|
||||
const validConfig = {
|
||||
name: "existing",
|
||||
version: 2,
|
||||
enabled: true,
|
||||
timeout: 3000,
|
||||
maxRetries: 2,
|
||||
};
|
||||
await writeFile(configPath, JSON.stringify(validConfig));
|
||||
|
||||
const result = await fileHandler.loadConfig();
|
||||
expect(result.name).toBe("existing");
|
||||
expect(result.version).toBe(2);
|
||||
expect(result.maxRetries).toBe(2);
|
||||
});
|
||||
|
||||
test("falls back to migration when file doesn't exist", async () => {
|
||||
const result = await fileHandler.loadConfig();
|
||||
|
||||
expect(configDefinition.migrationCallCount).toBe(1);
|
||||
expect(result.name).toBe("migrated");
|
||||
expect(result.version).toBe(2);
|
||||
|
||||
// Should persist migrated config
|
||||
const persistedContent = await readFile(configPath, "utf8");
|
||||
const persistedConfig = JSON.parse(persistedContent);
|
||||
expect(persistedConfig.name).toBe("migrated");
|
||||
});
|
||||
|
||||
test("falls back to defaults when migration fails", async () => {
|
||||
configDefinition.migrationShouldFail = true;
|
||||
|
||||
const result = await fileHandler.loadConfig();
|
||||
|
||||
expect(result.name).toBe("test"); // From defaults
|
||||
expect(result.version).toBe(1);
|
||||
});
|
||||
|
||||
test("CRITICAL: file valid but merged config fails validation - triggers migration", async () => {
|
||||
// File contains valid config but defaults have invalid maxRetries
|
||||
const fileConfig = {
|
||||
name: "file-valid",
|
||||
version: 2,
|
||||
enabled: true,
|
||||
timeout: 2000,
|
||||
// Note: no maxRetries in file
|
||||
};
|
||||
await writeFile(configPath, JSON.stringify(fileConfig));
|
||||
|
||||
// Override defaults to include invalid value that fails after merge
|
||||
const originalDefaults = configDefinition.defaultConfig;
|
||||
configDefinition.defaultConfig = () => ({
|
||||
name: "test",
|
||||
version: 1,
|
||||
enabled: false,
|
||||
timeout: 5000,
|
||||
maxRetries: -1, // This will cause merged validation to fail!
|
||||
});
|
||||
|
||||
configDefinition.mergeValidationShouldFail = true;
|
||||
|
||||
// This should NOT throw - should catch validation error and migrate
|
||||
const result = await fileHandler.loadConfig();
|
||||
|
||||
// Should have triggered migration due to validation failure
|
||||
expect(configDefinition.migrationCallCount).toBe(1);
|
||||
expect(result.name).toBe("migrated");
|
||||
expect(result.maxRetries).toBe(5); // From migration
|
||||
|
||||
// Restore original method
|
||||
configDefinition.defaultConfig = originalDefaults;
|
||||
});
|
||||
|
||||
test("handles invalid JSON by migrating", async () => {
|
||||
await writeFile(configPath, "{ invalid json");
|
||||
|
||||
const result = await fileHandler.loadConfig();
|
||||
expect(configDefinition.migrationCallCount).toBe(1);
|
||||
expect(result.name).toBe("migrated");
|
||||
});
|
||||
|
||||
test("CRITICAL: read failed → migration succeeded but merged validation fails → defaults used", async () => {
|
||||
// No file exists (read will fail)
|
||||
// Migration will succeed but return config that passes its own validation
|
||||
// But when merged with defaults, the result fails validation
|
||||
|
||||
// Create a special definition for this edge case
|
||||
class SpecialMigrationDefinition extends TestConfigDefinition {
|
||||
async migrateConfig(): Promise<TestConfig> {
|
||||
this.migrationCallCount++;
|
||||
// Return a config that's valid on its own
|
||||
return {
|
||||
name: "migration-success",
|
||||
version: 2,
|
||||
enabled: true,
|
||||
timeout: 2000,
|
||||
// Missing maxRetries - will be merged from defaults
|
||||
};
|
||||
}
|
||||
|
||||
async validate(config: object): Promise<TestConfig> {
|
||||
const testConfig = config as TestConfig;
|
||||
|
||||
// Basic validation
|
||||
if (
|
||||
typeof testConfig.version !== "number" ||
|
||||
testConfig.version < 1
|
||||
) {
|
||||
throw new Error("Invalid version: must be >= 1");
|
||||
}
|
||||
|
||||
if (
|
||||
typeof testConfig.timeout !== "number" ||
|
||||
testConfig.timeout < 1000
|
||||
) {
|
||||
throw new Error("Invalid timeout: must be >= 1000");
|
||||
}
|
||||
|
||||
// This validation will fail after merge when maxRetries comes from defaults
|
||||
if (
|
||||
testConfig.maxRetries !== undefined &&
|
||||
testConfig.name === "migration-success" &&
|
||||
testConfig.maxRetries === 3
|
||||
) {
|
||||
throw new Error(
|
||||
"Special validation failure: migration + defaults combo invalid"
|
||||
);
|
||||
}
|
||||
|
||||
return testConfig;
|
||||
}
|
||||
}
|
||||
|
||||
const specialDefinition = new SpecialMigrationDefinition(testDir);
|
||||
const specialHandler = new ConfigFileHandler(specialDefinition);
|
||||
|
||||
// Should NOT throw - should catch validation error and fall back to defaults
|
||||
const result = await specialHandler.loadConfig();
|
||||
|
||||
// Should have attempted migration
|
||||
expect(specialDefinition.migrationCallCount).toBe(1);
|
||||
|
||||
// But result should be from defaults due to validation failure
|
||||
expect(result.name).toBe("test"); // From defaults
|
||||
expect(result.version).toBe(1); // From defaults
|
||||
expect(result.maxRetries).toBe(3); // From defaults
|
||||
});
|
||||
});
|
||||
|
||||
describe("File Operations", () => {
|
||||
test("readConfigFile validates config from disk", async () => {
|
||||
const config = {
|
||||
name: "read-test",
|
||||
version: 2,
|
||||
enabled: true,
|
||||
timeout: 2000,
|
||||
};
|
||||
await writeFile(configPath, JSON.stringify(config));
|
||||
|
||||
const result = await fileHandler.readConfigFile();
|
||||
expect(result).toEqual(config);
|
||||
});
|
||||
|
||||
test("readConfigFile throws for invalid config", async () => {
|
||||
const invalidConfig = {
|
||||
name: "invalid",
|
||||
version: -1,
|
||||
enabled: true,
|
||||
timeout: 2000,
|
||||
};
|
||||
await writeFile(configPath, JSON.stringify(invalidConfig));
|
||||
|
||||
await expect(fileHandler.readConfigFile()).rejects.toThrow(
|
||||
"Invalid version"
|
||||
);
|
||||
});
|
||||
|
||||
test("writeConfigFile persists config to disk", async () => {
|
||||
const config = {
|
||||
name: "write-test",
|
||||
version: 3,
|
||||
enabled: true,
|
||||
timeout: 4000,
|
||||
};
|
||||
|
||||
const success = await fileHandler.writeConfigFile(config);
|
||||
expect(success).toBe(true);
|
||||
|
||||
const fileContent = await readFile(configPath, "utf8");
|
||||
expect(JSON.parse(fileContent)).toEqual(config);
|
||||
});
|
||||
|
||||
test("writeConfigFile skips write when config unchanged (flash drive optimization)", async () => {
|
||||
const config = {
|
||||
name: "unchanged",
|
||||
version: 1,
|
||||
enabled: false,
|
||||
timeout: 5000,
|
||||
};
|
||||
await writeFile(configPath, JSON.stringify(config, null, 2));
|
||||
|
||||
const success = await fileHandler.writeConfigFile(config);
|
||||
expect(success).toBe(false); // Skipped
|
||||
});
|
||||
|
||||
test("writeConfigFile proceeds with write when existing file has invalid JSON", async () => {
|
||||
// Pre-existing file with invalid JSON
|
||||
await writeFile(configPath, "{ invalid json");
|
||||
|
||||
const config = {
|
||||
name: "write-despite-invalid",
|
||||
version: 2,
|
||||
enabled: true,
|
||||
timeout: 4000,
|
||||
};
|
||||
|
||||
// Should proceed with write despite invalid existing file
|
||||
const success = await fileHandler.writeConfigFile(config);
|
||||
expect(success).toBe(true);
|
||||
|
||||
// Should have written valid config
|
||||
const fileContent = await readFile(configPath, "utf8");
|
||||
expect(JSON.parse(fileContent)).toEqual(config);
|
||||
});
|
||||
|
||||
test("writeConfigFile handles validation errors", async () => {
|
||||
configDefinition.validationShouldFail = true;
|
||||
const config = {
|
||||
name: "invalid",
|
||||
version: 1,
|
||||
enabled: false,
|
||||
timeout: 5000,
|
||||
};
|
||||
|
||||
const success = await fileHandler.writeConfigFile(config);
|
||||
expect(success).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("updateConfig Operations", () => {
|
||||
test("updates existing config with partial changes", async () => {
|
||||
const existing = {
|
||||
name: "existing",
|
||||
version: 1,
|
||||
enabled: false,
|
||||
timeout: 5000,
|
||||
};
|
||||
await writeFile(configPath, JSON.stringify(existing));
|
||||
|
||||
const success = await fileHandler.updateConfig({
|
||||
enabled: true,
|
||||
timeout: 8000,
|
||||
});
|
||||
expect(success).toBe(true);
|
||||
|
||||
const updated = JSON.parse(await readFile(configPath, "utf8"));
|
||||
expect(updated.name).toBe("existing"); // Preserved
|
||||
expect(updated.enabled).toBe(true); // Updated
|
||||
expect(updated.timeout).toBe(8000); // Updated
|
||||
});
|
||||
|
||||
test("creates config when file doesn't exist (via migration)", async () => {
|
||||
const updates = { name: "new", enabled: true };
|
||||
|
||||
const success = await fileHandler.updateConfig(updates);
|
||||
expect(success).toBe(true);
|
||||
|
||||
const created = JSON.parse(await readFile(configPath, "utf8"));
|
||||
expect(created.name).toBe("new"); // From update
|
||||
expect(created.version).toBe(2); // From migration (no file existed)
|
||||
});
|
||||
|
||||
test("handles validation errors during update", async () => {
|
||||
const existing = {
|
||||
name: "existing",
|
||||
version: 1,
|
||||
enabled: false,
|
||||
timeout: 5000,
|
||||
};
|
||||
await writeFile(configPath, JSON.stringify(existing));
|
||||
|
||||
const success = await fileHandler.updateConfig({ version: -1 }); // Invalid
|
||||
expect(success).toBe(false);
|
||||
|
||||
// Original should be unchanged
|
||||
const unchanged = JSON.parse(await readFile(configPath, "utf8"));
|
||||
expect(unchanged.version).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Error Resilience", () => {
|
||||
test("handles write errors gracefully", async () => {
|
||||
const invalidDefinition = new TestConfigDefinition(
|
||||
"/invalid/readonly/path"
|
||||
);
|
||||
const invalidHandler = new ConfigFileHandler(invalidDefinition);
|
||||
|
||||
const config = {
|
||||
name: "error-test",
|
||||
version: 1,
|
||||
enabled: false,
|
||||
timeout: 5000,
|
||||
};
|
||||
const success = await invalidHandler.writeConfigFile(config);
|
||||
expect(success).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("End-to-End Workflow", () => {
|
||||
test("complete workflow: load -> update -> reload", async () => {
|
||||
// 1. Load (triggers migration since no file)
|
||||
let config = await fileHandler.loadConfig();
|
||||
expect(config.name).toBe("migrated");
|
||||
|
||||
// 2. Update
|
||||
await fileHandler.updateConfig({ name: "workflow-test", timeout: 6000 });
|
||||
|
||||
// 3. Reload from disk
|
||||
config = await fileHandler.readConfigFile();
|
||||
expect(config.name).toBe("workflow-test");
|
||||
expect(config.timeout).toBe(6000);
|
||||
expect(config.version).toBe(2); // Preserved from migration
|
||||
});
|
||||
});
|
||||
});
|
||||
100
packages/unraid-shared/src/util/config-definition.ts
Normal file
@@ -0,0 +1,100 @@
import { Logger } from "@nestjs/common";

/**
 * Abstract base class for configuration behavior without NestJS dependencies.
 * Provides core configuration logic including file path resolution, defaults,
 * validation, and migration support.
 *
 * @template T The configuration object type that extends object
 *
 * @example
 * ```typescript
 * interface MyConfig {
 *   enabled: boolean;
 *   timeout: number;
 * }
 *
 * class MyConfigDefinition extends ConfigDefinition<MyConfig> {
 *   constructor(private configDir: string) {
 *     super('MyConfig');
 *   }
 *
 *   fileName() { return "my-config.json"; }
 *   configPath() { return path.join(this.configDir, this.fileName()); }
 *   defaultConfig(): MyConfig { return { enabled: false, timeout: 5000 }; }
 *
 *   async validate(config: object): Promise<MyConfig> {
 *     const myConfig = config as MyConfig;
 *     if (myConfig.timeout < 1000) throw new Error("Timeout too low");
 *     return myConfig;
 *   }
 * }
 * ```
 */
export abstract class ConfigDefinition<T extends object> {
    protected logger: Logger;

    /**
     * @param loggerName Optional custom logger name (defaults to generic name)
     */
    constructor(loggerName?: string) {
        this.logger = new Logger(loggerName ?? `ConfigDefinition:${this.fileName()}`);
    }

    /**
     * Returns the filename for the configuration file.
     *
     * @returns The name of the config file (e.g., "my-config.json")
     * @example "user-preferences.json"
     */
    abstract fileName(): string;

    /**
     * Returns the absolute path to the configuration file.
     */
    abstract configPath(): string;

    /**
     * Returns the default configuration object.
     * Used as fallback when migration fails or as base for merging.
     */
    abstract defaultConfig(): T;

    /**
     * Validates and transforms a configuration object.
     *
     * Override to implement custom validation logic such as:
     * - Schema validation
     * - Range checking for numeric values
     * - Data transformation/normalization
     *
     * @param config - The raw config object to validate
     * @returns The validated and potentially transformed config
     * @throws Error if the config is invalid
     */
    async validate(config: object): Promise<T> {
        return config as T;
    }

    /**
     * Migrates legacy or corrupted configuration to the current format.
     *
     * Called when:
     * - Config file doesn't exist (first-time setup)
     * - Config file contains invalid JSON
     * - Config validation fails
     *
     * Override to provide custom migration logic for legacy formats,
     * version upgrades, or first-time installations.
     *
     * Note:
     * - Backwards-compatible updates such as field additions are better handled via `defaultConfig()`
     *   because `defaultConfig()` is merged with the loaded config.
     *
     * @returns Migrated configuration object
     * @throws Error if migration is not possible (falls back to defaults)
     */
    async migrateConfig(): Promise<T> {
        throw new Error("Not implemented");
    }
}
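A rough sketch of the two extension points follows; every name, path, and the legacy `.cfg` format below are assumptions for illustration. Additive fields belong in `defaultConfig()`, which the loader merges under whatever was read from disk, while `migrateConfig()` is reserved for genuine format changes such as importing a legacy file.

```typescript
// Illustrative only — the legacy file name and fields are made up.
import { readFile } from "node:fs/promises";
import { join } from "node:path";
import { ConfigDefinition } from "@unraid/shared/util/config-definition.js";

interface DisplayConfig {
    theme: string;
    refreshSeconds: number;
}

class DisplayConfigDefinition extends ConfigDefinition<DisplayConfig> {
    constructor(private configDir: string) {
        super("DisplayConfig");
    }

    fileName() {
        return "display.json";
    }

    configPath() {
        return join(this.configDir, this.fileName());
    }

    defaultConfig(): DisplayConfig {
        // Newly added fields can simply appear here; loadConfig() merges these
        // defaults underneath whatever is read from disk.
        return { theme: "auto", refreshSeconds: 5 };
    }

    // Only needed for real format changes, e.g. importing an older INI-style file.
    async migrateConfig(): Promise<DisplayConfig> {
        const legacy = await readFile(join(this.configDir, "display.cfg"), "utf8");
        const theme = /theme="?(\w+)"?/.exec(legacy)?.[1] ?? "auto";
        return { ...this.defaultConfig(), theme };
    }
}
```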
157
packages/unraid-shared/src/util/config-file-handler.ts
Normal file
@@ -0,0 +1,157 @@
import { Logger } from "@nestjs/common";
import { readFile, writeFile } from "node:fs/promises";
import { isEqual } from "lodash-es";
import { ConfigDefinition } from "./config-definition.js";
import { fileExists } from "./file.js";

/**
 * Standalone configuration file handler that works with any ConfigDefinition.
 * Can be used independently of NestJS DI container.
 *
 * This class provides robust file operations with the following features:
 * - **Migration Priority**: When files don't exist, migration is attempted before falling back to defaults
 * - **Change Detection**: Uses deep equality checks to avoid unnecessary disk writes (flash drive optimization)
 * - **Error Resilience**: Graceful handling of file system errors, JSON parsing failures, and validation errors
 * - **Atomic Operations**: Individual methods for specific file operations (read, write, update)
 *
 * @template T The configuration object type that extends object
 *
 * @example
 * ```typescript
 * const configDef = new MyConfigDefinition('/etc/myapp');
 * const fileHandler = new ConfigFileHandler(configDef);
 *
 * // Load config with migration fallback
 * const config = await fileHandler.loadConfig();
 *
 * // Update specific properties
 * await fileHandler.updateConfig({ enabled: true });
 * ```
 */
export class ConfigFileHandler<T extends object> {
    private readonly logger: Logger;

    /**
     * @param definition The configuration definition that provides behavior
     */
    constructor(private readonly definition: ConfigDefinition<T>) {
        this.logger = new Logger(`ConfigFileHandler:${definition.fileName()}`);
    }

    /**
     * Loads configuration from file, with migration fallback.
     *
     * Strategy:
     * 1. Load and validate existing config
     * 2. If loading fails, attempt migration
     * 3. If migration fails, use defaults
     * 4. Merge result with defaults and persist if migrated
     *
     * @returns Complete configuration object
     */
    async loadConfig(): Promise<T> {
        const defaultConfig = this.definition.defaultConfig();

        try {
            const fileConfig = await this.readConfigFile();
            return await this.definition.validate({
                ...defaultConfig,
                ...fileConfig,
            });
        } catch (error) {
            this.logger.warn(error, "Error loading config. Attempting to migrate...");

            try {
                const migratedConfig = await this.definition.migrateConfig();
                const mergedConfig = await this.definition.validate({
                    ...defaultConfig,
                    ...migratedConfig,
                });
                // Persist migrated config for future loads
                await this.writeConfigFile(mergedConfig);
                return mergedConfig;
            } catch (migrationError) {
                this.logger.warn("Migration failed. Using defaults.", migrationError);
                return defaultConfig;
            }
        }
    }

    /**
     * Reads and validates configuration from file.
     *
     * @param configPath - Path to config file (defaults to `configPath()`)
     * @returns Validated configuration object from disk
     * @throws Error if file doesn't exist, contains invalid JSON, or fails validation
     */
    async readConfigFile(configPath = this.definition.configPath()): Promise<T> {
        if (!(await fileExists(configPath))) {
            throw new Error(`Config file does not exist at '${configPath}'`);
        }
        const content = await readFile(configPath, "utf8");
        const parsed = JSON.parse(content);
        return await this.definition.validate(parsed);
    }

    /**
     * Writes configuration to file with change detection optimization.
     * Uses deep equality checks to avoid unnecessary writes.
     *
     * @param config - The config object to write to disk
     * @returns `true` if written to disk, `false` if skipped or failed
     */
    async writeConfigFile(config: T): Promise<boolean> {
        try {
            config = await this.definition.validate(config);
        } catch (error) {
            this.logger.error(error, `Cannot write invalid config`);
            return false;
        }

        // Skip write if config is unchanged (flash drive optimization)
        try {
            const existingConfig = await this.readConfigFile();
            if (isEqual(config, existingConfig)) {
                this.logger.verbose(`Config is unchanged, skipping write`);
                return false;
            }
        } catch (error) {
            // File doesn't exist or is invalid, proceed with write
            this.logger.verbose(`Existing config unreadable, proceeding with write`);
        }

        try {
            const data = JSON.stringify(config, null, 2);
            this.logger.verbose("Writing config");
            await writeFile(this.definition.configPath(), data);
            return true;
        } catch (error) {
            this.logger.error(
                error,
                `Error writing config to '${this.definition.configPath()}'`
            );
            return false;
        }
    }

    /**
     * Updates configuration by merging with existing config.
     * Loads current config, shallow merges updates, and writes back to disk.
     *
     * @param updates - Partial configuration object with properties to update
     * @returns `true` if updated on disk, `false` if failed or no changes
     */
    async updateConfig(updates: Partial<T>): Promise<boolean> {
        try {
            const currentConfig = await this.loadConfig();
            const newConfig = await this.definition.validate({
                ...currentConfig,
                ...updates,
            });
            return await this.writeConfigFile(newConfig);
        } catch (error) {
            this.logger.error("Failed to update config", error);
            return false;
        }
    }
}
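A short usage sketch of the standalone handler, reusing the hypothetical `DisplayConfigDefinition` from the earlier sketch (the directory path and import specifier are likewise assumptions):

```typescript
import { ConfigFileHandler } from "@unraid/shared/util/config-file-handler.js";

const definition = new DisplayConfigDefinition("/tmp/example-config");
const handler = new ConfigFileHandler(definition);

// Fresh system: the read fails, migrateConfig() runs, the result is merged with
// defaultConfig(), validated, and written back so the next load hits the file.
const config = await handler.loadConfig();
console.log(config.theme);

// Partial update: merged over the current config and skipped entirely if the
// resulting JSON is identical to what is already on disk (flash drive optimization).
await handler.updateConfig({ refreshSeconds: 10 });
```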
@@ -1,11 +1,24 @@
import { accessSync } from 'fs';
import { access } from 'fs/promises';
import { access, mkdir, writeFile } from 'fs/promises';
import { mkdirSync, writeFileSync } from 'fs';
import { F_OK } from 'node:constants';
import { dirname } from 'path';

/**
 * Checks if a file exists asynchronously.
 * @param path - The file path to check
 * @returns Promise that resolves to true if file exists, false otherwise
 */
export const fileExists = async (path: string) =>
    access(path, F_OK)
        .then(() => true)
        .catch(() => false);

/**
 * Checks if a file exists synchronously.
 * @param path - The file path to check
 * @returns true if file exists, false otherwise
 */
export const fileExistsSync = (path: string) => {
    try {
        accessSync(path, F_OK);
@@ -14,3 +27,44 @@ export const fileExistsSync = (path: string) => {
        return false;
    }
};

/**
 * Writes data to a file, creating parent directories if they don't exist.
 *
 * This function ensures the directory structure exists before writing the file,
 * equivalent to `mkdir -p` followed by file writing.
 *
 * @param path - The file path to write to
 * @param data - The data to write (string or Buffer)
 * @throws {Error} If path is invalid (null, empty, or not a string)
 * @throws {Error} For any file system errors (EACCES, EPERM, ENOSPC, EISDIR, etc.)
 */
export const ensureWrite = async (path: string, data: string | Buffer) => {
    if (!path || typeof path !== 'string') {
        throw new Error(`Invalid path provided: ${path}`);
    }

    await mkdir(dirname(path), { recursive: true });
    return await writeFile(path, data);
};

/**
 * Writes data to a file synchronously, creating parent directories if they don't exist.
 *
 * This function ensures the directory structure exists before writing the file,
 * equivalent to `mkdir -p` followed by file writing.
 *
 * @param path - The file path to write to
 * @param data - The data to write (string or Buffer)
 * @throws {Error} If path is invalid (null, empty, or not a string)
 * @throws {Error} For any file system errors (EACCES, EPERM, ENOSPC, EISDIR, etc.)
 */
export const ensureWriteSync = (path: string, data: string | Buffer) => {
    if (!path || typeof path !== 'string') {
        throw new Error(`Invalid path provided: ${path}`);
    }

    mkdirSync(dirname(path), { recursive: true });
    return writeFileSync(path, data);
};
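A brief example of the new helpers — the import specifier and target path are assumptions, not taken from this changeset. `ensureWrite` behaves like `mkdir -p` on the parent directory followed by a write, so missing intermediate directories are not an error.

```typescript
// Hedged usage sketch; adjust the import path to wherever file.ts is exported from.
import { ensureWrite, fileExists } from "@unraid/shared/util/file.js";

const target = "/tmp/example/nested/dir/report.json";

// Creates /tmp/example/nested/dir if needed, then writes the file.
await ensureWrite(target, JSON.stringify({ ok: true }, null, 2));

console.log(await fileExists(target)); // true
```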
@@ -1,17 +1,17 @@
|
||||
{
|
||||
"name": "@unraid/connect-plugin",
|
||||
"version": "4.9.3",
|
||||
"version": "4.10.0",
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"commander": "14.0.0",
|
||||
"conventional-changelog": "6.0.0",
|
||||
"date-fns": "4.1.0",
|
||||
"glob": "11.0.1",
|
||||
"glob": "11.0.3",
|
||||
"html-sloppy-escaper": "0.1.0",
|
||||
"semver": "7.7.1",
|
||||
"tsx": "4.19.3",
|
||||
"zod": "3.24.2",
|
||||
"zx": "8.3.2"
|
||||
"semver": "7.7.2",
|
||||
"tsx": "4.20.3",
|
||||
"zod": "3.25.76",
|
||||
"zx": "8.7.1"
|
||||
},
|
||||
"type": "module",
|
||||
"license": "GPL-2.0-or-later",
|
||||
@@ -37,7 +37,7 @@
|
||||
"devDependencies": {
|
||||
"http-server": "14.1.1",
|
||||
"nodemon": "3.1.10",
|
||||
"vitest": "3.0.7"
|
||||
"vitest": "3.2.4"
|
||||
},
|
||||
"packageManager": "pnpm@10.12.4"
|
||||
"packageManager": "pnpm@10.13.1"
|
||||
}
|
||||
|
||||
@@ -155,6 +155,11 @@ exit 0
|
||||
# Remove the old header logo from DefaultPageLayout.php if present
|
||||
if [ -f "/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php" ]; then
|
||||
sed -i 's|<a href="https://unraid.net" target="_blank"><?readfile("$docroot/webGui/images/UN-logotype-gradient.svg")?></a>||g' "/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php"
|
||||
|
||||
# Add unraid-modals element if not already present
|
||||
if ! grep -q '<unraid-modals>' "/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php"; then
|
||||
sed -i 's|<body>|<body>\n<unraid-modals></unraid-modals>|' "/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php"
|
||||
fi
|
||||
fi
|
||||
|
||||
]]>
|
||||
@@ -326,8 +331,7 @@ exit 0
|
||||
<![CDATA[
|
||||
SCRIPTS_DIR="/usr/local/share/dynamix.unraid.net/install/scripts"
|
||||
# Log file for debugging
|
||||
LOGFILE="/var/log/unraid-api/dynamix-unraid-install.log"
|
||||
mkdir -p "$(dirname "$LOGFILE")"
|
||||
mkdir -p "/var/log/unraid-api"
|
||||
|
||||
echo "Starting Unraid Connect installation..."
|
||||
|
||||
@@ -339,26 +343,26 @@ CFG_NEW=/boot/config/plugins/dynamix.my.servers
|
||||
# Setup the API (but don't start it yet)
|
||||
if [ -x "$SCRIPTS_DIR/setup_api.sh" ]; then
|
||||
echo "Setting up Unraid API..."
|
||||
echo "Running setup_api.sh" >> "$LOGFILE"
|
||||
# Capture output and add to log file
|
||||
setup_output=$("$SCRIPTS_DIR/setup_api.sh")
|
||||
echo "$setup_output" >> "$LOGFILE"
|
||||
echo "Running setup_api.sh"
|
||||
# Run and show output to user
|
||||
"$SCRIPTS_DIR/setup_api.sh"
|
||||
else
|
||||
echo "ERROR: setup_api.sh not found or not executable" >> "$LOGFILE"
|
||||
echo "ERROR: setup_api.sh not found or not executable"
|
||||
echo "ERROR: setup_api.sh not found or not executable"
|
||||
fi
|
||||
|
||||
# Run post-installation verification
|
||||
if [ -x "$SCRIPTS_DIR/verify_install.sh" ]; then
|
||||
echo "Running post-installation verification..."
|
||||
echo "Running verify_install.sh" >> "$LOGFILE"
|
||||
# Capture output and add to log file
|
||||
verify_output=$("$SCRIPTS_DIR/verify_install.sh")
|
||||
echo "$verify_output" >> "$LOGFILE"
|
||||
echo "Running verify_install.sh"
|
||||
# Run and show output to user
|
||||
"$SCRIPTS_DIR/verify_install.sh"
|
||||
else
|
||||
echo "ERROR: verify_install.sh not found or not executable" >> "$LOGFILE"
|
||||
echo "ERROR: verify_install.sh not found or not executable"
|
||||
echo "ERROR: verify_install.sh not found or not executable"
|
||||
fi
|
||||
|
||||
echo "Installation completed at $(date)" >> "$LOGFILE"
|
||||
echo "Installation completed at $(date)"
|
||||
]]>
|
||||
</INLINE>
|
||||
</FILE>
|
||||
@@ -374,6 +378,18 @@ echo "Installation completed at $(date)" >> "$LOGFILE"
|
||||
/etc/rc.d/rc.unraid-api cleanup-dependencies
|
||||
|
||||
echo "Starting Unraid API service"
|
||||
echo "DEBUG: Checking PATH: $PATH"
|
||||
echo "DEBUG: Checking if unraid-api files exist:"
|
||||
ls -la /usr/local/unraid-api/dist/
|
||||
echo "DEBUG: Checking symlink:"
|
||||
ls -la /usr/local/bin/unraid-api
|
||||
echo "DEBUG: Checking Node.js version:"
|
||||
node --version
|
||||
echo "DEBUG: Checking if cli.js is executable:"
|
||||
ls -la /usr/local/unraid-api/dist/cli.js
|
||||
echo "DEBUG: Attempting to run unraid-api directly:"
|
||||
/usr/local/unraid-api/dist/cli.js version || echo "Direct execution failed"
|
||||
|
||||
echo "If no additional messages appear within 30 seconds, it is safe to refresh the page."
|
||||
/etc/rc.d/rc.unraid-api plugins add unraid-api-plugin-connect -b --no-restart
|
||||
/etc/rc.d/rc.unraid-api start
|
||||
|
||||
@@ -166,22 +166,23 @@ _enabled() {
|
||||
return 1
|
||||
}
|
||||
_connected() {
|
||||
CFG=$API_CONFIG_HOME/connect.json
|
||||
[[ ! -f "${CFG}" ]] && return 1
|
||||
local connect_config username status_cfg connection_status
|
||||
connect_config=$API_CONFIG_HOME/connect.json
|
||||
[[ ! -f "${connect_config}" ]] && return 1
|
||||
|
||||
username=$(jq -r '.username // empty' "${CFG}" 2>/dev/null)
|
||||
# is the user signed in?
|
||||
username=$(jq -r '.username // empty' "${connect_config}" 2>/dev/null)
|
||||
if [ -z "${username}" ]; then
|
||||
return 1
|
||||
fi
|
||||
# the minigraph status is no longer synced to the connect config file
|
||||
# to avoid a false negative, we'll omit this check for now.
|
||||
#
|
||||
# shellcheck disable=SC1090
|
||||
# source <(sed -nr '/\[connectionStatus\]/,/\[/{/minigraph/p}' "${CFG}" 2>/dev/null)
|
||||
# # ensure connected
|
||||
# if [[ -z "${minigraph}" || "${minigraph}" != "CONNECTED" ]]; then
|
||||
# return 1
|
||||
# fi
|
||||
# are we connected to mothership?
|
||||
status_cfg="/var/local/emhttp/connectStatus.json"
|
||||
[[ ! -f "${status_cfg}" ]] && return 1
|
||||
connection_status=$(jq -r '.connectionStatus // empty' "${status_cfg}" 2>/dev/null)
|
||||
if [[ "${connection_status}" != "CONNECTED" ]]; then
|
||||
return 1
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
_haserror() {
|
||||
|
||||
@@ -4,9 +4,6 @@
|
||||
# shellcheck source=/dev/null
|
||||
source /etc/profile
|
||||
|
||||
flash="/boot/config/plugins/dynamix.my.servers"
|
||||
[[ ! -d "${flash}" ]] && echo "Please reinstall the Unraid Connect plugin" && exit 1
|
||||
[[ ! -f "${flash}/env" ]] && echo 'env=production' >"${flash}/env"
|
||||
unraid_binary_path="/usr/local/bin/unraid-api"
|
||||
api_base_dir="/usr/local/unraid-api"
|
||||
scripts_dir="/usr/local/share/dynamix.unraid.net/scripts"
|
||||
|
||||
@@ -18,10 +18,9 @@ $cli = php_sapi_name()=='cli';
|
||||
|
||||
$docroot ??= ($_SERVER['DOCUMENT_ROOT'] ?: '/usr/local/emhttp');
|
||||
require_once "$docroot/webGui/include/Wrappers.php";
|
||||
require_once "$docroot/plugins/dynamix.my.servers/include/connect-config.php";
|
||||
|
||||
$myservers_flash_cfg_path='/boot/config/plugins/dynamix.my.servers/myservers.cfg';
|
||||
$myservers = file_exists($myservers_flash_cfg_path) ? @parse_ini_file($myservers_flash_cfg_path,true) : [];
|
||||
$isRegistered = !empty($myservers['remote']['username']);
|
||||
$isRegistered = ConnectConfig::isUserSignedIn();
|
||||
|
||||
// Read connection status from the new API status file
|
||||
$statusFilePath = '/var/local/emhttp/connectStatus.json';
|
||||
@@ -595,9 +594,31 @@ set_git_config('user.email', 'gitbot@unraid.net');
|
||||
set_git_config('user.name', 'gitbot');
|
||||
|
||||
// ensure dns can resolve backup.unraid.net
|
||||
if (! checkdnsrr("backup.unraid.net","A") ) {
|
||||
$dnsResolved = false;
|
||||
|
||||
// Try multiple DNS resolution methods
|
||||
if (function_exists('dns_get_record')) {
|
||||
$dnsRecords = dns_get_record("backup.unraid.net", DNS_A);
|
||||
$dnsResolved = !empty($dnsRecords);
|
||||
}
|
||||
|
||||
// Fallback to gethostbyname if dns_get_record fails
|
||||
if (!$dnsResolved) {
|
||||
$ip = gethostbyname("backup.unraid.net");
|
||||
$dnsResolved = ($ip !== "backup.unraid.net");
|
||||
}
|
||||
|
||||
// Final fallback to system nslookup
|
||||
if (!$dnsResolved) {
|
||||
$output = [];
|
||||
$return_var = 0;
|
||||
exec('nslookup backup.unraid.net 2>/dev/null', $output, $return_var);
|
||||
$dnsResolved = ($return_var === 0 && !empty($output));
|
||||
}
|
||||
|
||||
if (!$dnsResolved) {
|
||||
$arrState['loading'] = '';
|
||||
$arrState['error'] = 'DNS is unable to resolve backup.unraid.net';
|
||||
$arrState['error'] = 'DNS resolution failed for backup.unraid.net - PHP DNS functions (checkdnsrr, dns_get_record, gethostbyname) and system nslookup all failed to resolve the hostname. This indicates a DNS configuration issue on your Unraid server. Check your DNS settings in Settings > Network Settings.';
|
||||
response_complete(406, array('error' => $arrState['error']));
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,26 @@
<?php
$docroot = $docroot ?? $_SERVER['DOCUMENT_ROOT'] ?: '/usr/local/emhttp';
require_once "$docroot/plugins/dynamix.my.servers/include/api-config.php";

/**
 * Wrapper around the API's connect.json configuration file.
 */
class ConnectConfig
{
    public const CONFIG_PATH = ApiConfig::CONFIG_DIR . '/connect.json';

    public static function getConfig()
    {
        try {
            return json_decode(file_get_contents(self::CONFIG_PATH), true) ?? [];
        } catch (Throwable $e) {
            return [];
        }
    }

    public static function isUserSignedIn()
    {
        $config = self::getConfig();
        return ApiConfig::isConnectPluginEnabled() && !empty($config['username'] ?? '');
    }
}
@@ -39,6 +39,7 @@ class WebComponentsExtractor
|
||||
return $contents ? json_decode($contents, true) : [];
|
||||
}
|
||||
|
||||
|
||||
private function getRichComponentsFile(): string
|
||||
{
|
||||
$manifestFiles = $this->findManifestFiles('manifest.json');
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
#!/bin/sh
|
||||
#!/bin/bash
|
||||
# Unraid API Installation Verification Script
|
||||
# Checks that critical files are installed correctly
|
||||
|
||||
# Exit on errors
|
||||
set -e
|
||||
|
||||
echo "Performing comprehensive installation verification..."
|
||||
|
||||
# Define critical files to check (POSIX-compliant, no arrays)
|
||||
@@ -171,7 +168,7 @@ if [ $TOTAL_ERRORS -eq 0 ]; then
|
||||
else
|
||||
printf 'Found %d total errors.\n' "$TOTAL_ERRORS"
|
||||
echo "Installation verification completed with issues."
|
||||
echo "See log file for details: /var/log/unraid-api/dynamix-unraid-install.log"
|
||||
echo "Please review the errors above and contact support if needed."
|
||||
# We don't exit with error as this is just a verification script
|
||||
exit 0
|
||||
fi
|
||||
5955
pnpm-lock.yaml
generated
5955
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
@@ -4,5 +4,5 @@
|
||||
"tabWidth": 2,
|
||||
"printWidth": 105,
|
||||
"singleQuote": true,
|
||||
"plugins": ["prettier-plugin-tailwindcss", "@ianvs/prettier-plugin-sort-imports"]
|
||||
"plugins": ["@ianvs/prettier-plugin-sort-imports"]
|
||||
}
|
||||
|
||||
@@ -19,29 +19,21 @@ const config: StorybookConfig = {
|
||||
staticDirs: ['./static'],
|
||||
|
||||
async viteFinal(config) {
|
||||
const storybookDir = dirname(new URL(import.meta.url).pathname);
|
||||
|
||||
return {
|
||||
...config,
|
||||
root: dirname(require.resolve('@storybook/builder-vite')),
|
||||
plugins: [...(config.plugins ?? [])],
|
||||
resolve: {
|
||||
alias: {
|
||||
'@': join(dirname(new URL(import.meta.url).pathname), '../src'),
|
||||
'@/components': join(dirname(new URL(import.meta.url).pathname), '../src/components'),
|
||||
'@/lib': join(dirname(new URL(import.meta.url).pathname), '../src/lib'),
|
||||
'@': join(storybookDir, '../src'),
|
||||
'@/components': join(storybookDir, '../src/components'),
|
||||
'@/lib': join(storybookDir, '../src/lib'),
|
||||
},
|
||||
},
|
||||
optimizeDeps: {
|
||||
include: [...(config.optimizeDeps?.include ?? []), '@unraid/tailwind-rem-to-rem'],
|
||||
},
|
||||
css: {
|
||||
postcss: {
|
||||
plugins: [
|
||||
(await import('tailwindcss')).default({
|
||||
config: './tailwind.config.ts',
|
||||
}),
|
||||
(await import('autoprefixer')).default,
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import type { Preview } from '@storybook/vue3-vite';
|
||||
import { registerAllComponents } from '../src/register';
|
||||
import '@/styles/index.css';
|
||||
import '@/../.storybook/static/index.css';
|
||||
|
||||
registerAllComponents({
|
||||
pathToSharedCss: '/index.css',
|
||||
|
||||
@@ -27,36 +27,6 @@ Import the component library styles in your main entry file:
|
||||
import '@unraid/ui/style.css';
|
||||
```
|
||||
|
||||
### 2. Configure TailwindCSS
|
||||
|
||||
Create a `tailwind.config.ts` file with the following configuration:
|
||||
|
||||
```typescript
|
||||
import tailwindConfig from '@unraid/ui/tailwind.config.ts';
|
||||
import type { Config } from 'tailwindcss';
|
||||
|
||||
export default {
|
||||
presets: [tailwindConfig],
|
||||
content: [
|
||||
// ... your content paths
|
||||
'./components/**/*.{js,vue,ts}',
|
||||
'./layouts/**/*.vue',
|
||||
'./pages/**/*.vue',
|
||||
],
|
||||
theme: {
|
||||
extend: {
|
||||
// your theme extensions
|
||||
},
|
||||
},
|
||||
} satisfies Partial<Config>;
|
||||
```
|
||||
|
||||
This configuration:
|
||||
|
||||
- Uses the Unraid UI library's Tailwind config as a preset
|
||||
- Properly types your configuration with TypeScript
|
||||
- Allows you to extend the base theme while maintaining all Unraid UI defaults
|
||||
|
||||
## Usage
|
||||
|
||||
```vue
|
||||
@@ -249,7 +219,7 @@ const meta = {
|
||||
argTypes: {
|
||||
variant: {
|
||||
control: 'select',
|
||||
options: ['primary', 'secondary', 'outline'],
|
||||
options: ['primary', 'secondary', 'outline-solid'],
|
||||
},
|
||||
size: {
|
||||
control: 'select',
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
"style": "default",
|
||||
"typescript": true,
|
||||
"tailwind": {
|
||||
"config": "tailwind.config.ts",
|
||||
"css": "src/styles/globals.css",
|
||||
"baseColor": "neutral",
|
||||
"cssVariables": true,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@unraid/ui",
|
||||
"version": "4.9.3",
|
||||
"version": "4.10.0",
|
||||
"private": true,
|
||||
"license": "GPL-2.0-or-later",
|
||||
"type": "module",
|
||||
@@ -9,11 +9,9 @@
|
||||
"types": "./dist/index.d.ts",
|
||||
"sideEffects": false,
|
||||
"files": [
|
||||
"dist",
|
||||
"tailwind.config.ts"
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"prepare": "pnpm build",
|
||||
"// Development": "",
|
||||
"dev": "vite",
|
||||
"preview": "vite preview",
|
||||
@@ -34,11 +32,11 @@
|
||||
"preunraid:deploy": "pnpm build:wc",
|
||||
"unraid:deploy": "just deploy",
|
||||
"// Storybook": "",
|
||||
"prestorybook": "pnpm storybook:css",
|
||||
"storybook": "storybook dev -p 6006",
|
||||
"storybook:css": "node scripts/build-style.mjs",
|
||||
"prebuild-storybook": "pnpm storybook:css",
|
||||
"build-storybook": "storybook build",
|
||||
"storybook": "pnpm tailwind:watch & pnpm storybook:dev",
|
||||
"storybook:dev": "storybook dev -p 6006",
|
||||
"build-storybook": "pnpm tailwind:build && storybook build",
|
||||
"tailwind:build": "tailwindcss -i ./src/styles/globals.css -o ./.storybook/static/index.css",
|
||||
"tailwind:watch": "pnpm tailwind:build --watch",
|
||||
"// Cloudflare Workers Deployment": "",
|
||||
"deploy:storybook": "pnpm build-storybook && wrangler deploy",
|
||||
"deploy:storybook:staging": "pnpm build-storybook && wrangler deploy --env staging"
|
||||
@@ -54,68 +52,65 @@
|
||||
"@jsonforms/core": "3.6.0",
|
||||
"@jsonforms/vue": "3.6.0",
|
||||
"@jsonforms/vue-vanilla": "3.6.0",
|
||||
"@vueuse/core": "13.4.0",
|
||||
"@tailwindcss/cli": "4.1.11",
|
||||
"@vueuse/core": "13.5.0",
|
||||
"class-variance-authority": "0.7.1",
|
||||
"clsx": "2.1.1",
|
||||
"dompurify": "3.2.6",
|
||||
"kebab-case": "2.0.2",
|
||||
"lucide-vue-next": "0.519.0",
|
||||
"lucide-vue-next": "0.525.0",
|
||||
"marked": "16.0.0",
|
||||
"reka-ui": "2.3.1",
|
||||
"reka-ui": "2.3.2",
|
||||
"shadcn-vue": "2.2.0",
|
||||
"tailwind-merge": "2.6.0",
|
||||
"vue-sonner": "1.3.0"
|
||||
"tw-animate-css": "1.3.5",
|
||||
"vue-sonner": "1.3.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.4.2",
|
||||
"@storybook/addon-docs": "9.0.16",
|
||||
"@storybook/addon-links": "9.0.16",
|
||||
"@storybook/builder-vite": "9.0.16",
|
||||
"@storybook/vue3-vite": "9.0.16",
|
||||
"@tailwindcss/typography": "0.5.16",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.5.1",
|
||||
"@storybook/addon-docs": "9.0.17",
|
||||
"@storybook/addon-links": "9.0.17",
|
||||
"@storybook/builder-vite": "9.0.17",
|
||||
"@storybook/vue3-vite": "9.0.17",
|
||||
"@tailwindcss/vite": "4.1.11",
|
||||
"@testing-library/vue": "8.1.0",
|
||||
"@types/jsdom": "21.1.7",
|
||||
"@types/node": "22.15.32",
|
||||
"@types/node": "22.16.4",
|
||||
"@types/testing-library__vue": "5.3.0",
|
||||
"@typescript-eslint/eslint-plugin": "8.34.1",
|
||||
"@unraid/tailwind-rem-to-rem": "1.1.0",
|
||||
"@vitejs/plugin-vue": "5.2.4",
|
||||
"@typescript-eslint/eslint-plugin": "8.37.0",
|
||||
"@unraid/tailwind-rem-to-rem": "2.0.0",
|
||||
"@vitejs/plugin-vue": "6.0.0",
|
||||
"@vitest/coverage-v8": "3.2.4",
|
||||
"@vitest/ui": "3.2.4",
|
||||
"@vue/test-utils": "2.4.6",
|
||||
"@vue/tsconfig": "0.7.0",
|
||||
"autoprefixer": "10.4.21",
|
||||
"concurrently": "9.1.2",
|
||||
"eslint": "9.29.0",
|
||||
"concurrently": "9.2.0",
|
||||
"eslint": "9.31.0",
|
||||
"eslint-config-prettier": "10.1.5",
|
||||
"eslint-plugin-import": "2.31.0",
|
||||
"eslint-plugin-import": "2.32.0",
|
||||
"eslint-plugin-no-relative-import-paths": "1.6.1",
|
||||
"eslint-plugin-prettier": "5.5.0",
|
||||
"eslint-plugin-storybook": "9.0.16",
|
||||
"eslint-plugin-vue": "10.2.0",
|
||||
"happy-dom": "18.0.0",
|
||||
"jiti": "^2.4.2",
|
||||
"eslint-plugin-prettier": "5.5.1",
|
||||
"eslint-plugin-storybook": "9.0.17",
|
||||
"eslint-plugin-vue": "10.3.0",
|
||||
"happy-dom": "18.0.1",
|
||||
"jiti": "2.4.2",
|
||||
"postcss": "8.5.6",
|
||||
"postcss-import": "16.1.1",
|
||||
"prettier": "3.5.3",
|
||||
"prettier-plugin-tailwindcss": "0.6.13",
|
||||
"prettier": "3.6.2",
|
||||
"rimraf": "6.0.1",
|
||||
"storybook": "9.0.16",
|
||||
"tailwind-rem-to-rem": "github:unraid/tailwind-rem-to-rem",
|
||||
"tailwindcss": "3.4.17",
|
||||
"tailwindcss-animate": "1.0.7",
|
||||
"storybook": "9.0.17",
|
||||
"tailwindcss": "4.1.11",
|
||||
"typescript": "5.8.3",
|
||||
"typescript-eslint": "8.34.1",
|
||||
"vite": "7.0.3",
|
||||
"typescript-eslint": "8.37.0",
|
||||
"vite": "7.0.4",
|
||||
"vite-plugin-dts": "3.9.1",
|
||||
"vite-plugin-vue-devtools": "7.7.7",
|
||||
"vitest": "3.2.4",
|
||||
"vue": "3.5.17",
|
||||
"vue-tsc": "3.0.1",
|
||||
"wrangler": "^3.87.0"
|
||||
"wrangler": "4.24.3"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@rollup/rollup-linux-x64-gnu": "4.44.0"
|
||||
"@rollup/rollup-linux-x64-gnu": "4.45.1"
|
||||
},
|
||||
"exports": {
|
||||
".": {
|
||||
@@ -124,20 +119,7 @@
|
||||
"require": "./dist/index.cjs"
|
||||
},
|
||||
"./styles": "./dist/style.css",
|
||||
"./styles/*": "./src/styles/*",
|
||||
"./tailwind.config": {
|
||||
"types": "./dist/tailwind.config.d.ts",
|
||||
"import": "./dist/tailwind.config.js",
|
||||
"default": "./dist/tailwind.config.js"
|
||||
},
|
||||
"./tailwind.config.ts": {
|
||||
"import": "./tailwind.config.ts",
|
||||
"default": "./tailwind.config.ts"
|
||||
},
|
||||
"./theme/preset": {
|
||||
"types": "./dist/theme/preset.d.ts",
|
||||
"import": "./dist/theme/preset.js"
|
||||
}
|
||||
"./styles/*": "./src/styles/*"
|
||||
},
|
||||
"packageManager": "pnpm@10.12.4"
|
||||
"packageManager": "pnpm@10.13.1"
|
||||
}
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
export default {
|
||||
plugins: {
|
||||
tailwindcss: {},
|
||||
autoprefixer: {},
|
||||
},
|
||||
};
|
||||
@@ -1,30 +0,0 @@
import fs from 'fs/promises';
import autoprefixer from 'autoprefixer';
import postcss from 'postcss';
import postcssImport from 'postcss-import';
import tailwindcss from 'tailwindcss';

/**
* Helper script for storybook to build the CSS file for the components. This is used to ensure that modals render using the shadow styles.
*/

process.env.VITE_TAILWIND_BASE_FONT_SIZE = 16;

const inputPath = './src/styles/index.css';
const outputPath = './.storybook/static/index.css'; // served from root: /index.css

const css = await fs.readFile(inputPath, 'utf8');

const result = await postcss([
postcssImport(),
tailwindcss({ config: './tailwind.config.ts' }),
autoprefixer(),
]).process(css, {
from: inputPath,
to: outputPath,
});

await fs.mkdir('./.storybook/static', { recursive: true });
await fs.writeFile(outputPath, result.css);

console.log('✅ CSS built for Storybook:', outputPath);
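The removed helper above ran Tailwind v3 through PostCSS to produce a static stylesheet for Storybook. With `tailwindcss` 4 and `@tailwindcss/vite` now in the dependency list, that compilation can happen inside Vite itself, so neither the PostCSS config nor the build script is required. A minimal sketch of such a setup — the file name, plugin order, and the assumption that Storybook reuses the Vite config are mine, not taken from this diff:

```ts
// vite.config.ts (sketch, assumptions noted above)
import tailwindcss from '@tailwindcss/vite';
import vue from '@vitejs/plugin-vue';
import { defineConfig } from 'vite';

export default defineConfig({
  // The Tailwind v4 Vite plugin scans the project and generates CSS on the fly,
  // replacing the postcss.config + build-storybook-css pipeline removed above.
  plugins: [vue(), tailwindcss()],
});
```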
@@ -42,18 +42,17 @@ const props = withDefaults(defineProps<BrandButtonProps>(), {
defineEmits(['click']);

const classes = computed(() => {
const iconSize = `w-${props.size}`;

return {
button: cn(
brandButtonVariants({ variant: props.variant, size: props.size, padding: props.padding }),
props.class
),
icon: `${iconSize} fill-current flex-shrink-0`,
icon: 'w-[var(--icon-size)] fill-current shrink-0',
iconSize: props.size ?? '16px',
};
});
const needsBrandGradientBackground = computed(() => {
return ['outline', 'outline-primary'].includes(props.variant ?? '');
return ['outline-solid', 'outline-primary'].includes(props.variant ?? '');
});
</script>
@@ -71,15 +70,20 @@ const needsBrandGradientBackground = computed(() => {
>
<div
v-if="variant === 'fill'"
class="absolute -top-[2px] -right-[2px] -bottom-[2px] -left-[2px] -z-10 bg-gradient-to-r from-unraid-red to-orange opacity-100 transition-all rounded-md group-hover:opacity-60 group-focus:opacity-60"
class="absolute -top-[2px] -right-[2px] -bottom-[2px] -left-[2px] -z-10 bg-linear-to-r from-unraid-red to-orange opacity-100 transition-all rounded-md group-hover:opacity-60 group-focus:opacity-60"
/>
<!-- gives outline buttons the brand gradient background -->
<div
v-if="needsBrandGradientBackground"
class="absolute -top-[2px] -right-[2px] -bottom-[2px] -left-[2px] -z-10 bg-gradient-to-r from-unraid-red to-orange opacity-0 transition-all rounded-md group-hover:opacity-100 group-focus:opacity-100"
class="absolute -top-[2px] -right-[2px] -bottom-[2px] -left-[2px] -z-10 bg-linear-to-r from-unraid-red to-orange opacity-0 transition-all rounded-md group-hover:opacity-100 group-focus:opacity-100"
/>

<component :is="icon" v-if="icon" :class="classes.icon" />
<component
:is="icon"
v-if="icon"
:class="classes.icon"
:style="{ '--icon-size': classes.iconSize }"
/>

{{ text }}
<slot />
@@ -92,6 +96,7 @@ const needsBrandGradientBackground = computed(() => {
iconRightHoverDisplay &&
'opacity-0 group-hover:opacity-100 group-focus:opacity-100 transition-all',
]"
:style="{ '--icon-size': classes.iconSize }"
/>
</component>
</template>
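The icon-sizing change in the two hunks above swaps a runtime-composed class (`w-${props.size}`) for a static arbitrary-value utility driven by a CSS custom property, since Tailwind's compiler can only pick up class names that appear literally in the source. A minimal sketch of the pattern; variable names are illustrative, not from the component:

```ts
// Sketch of the CSS-variable sizing pattern; names are illustrative.
const size = '18px';

// Static class string — Tailwind can see `w-[var(--icon-size)]` at build time.
const iconClass = 'w-[var(--icon-size)] fill-current shrink-0';

// Dynamic value supplied per instance via an inline style binding.
const iconStyle = { '--icon-size': size };
// In the template: <component :is="icon" :class="iconClass" :style="iconStyle" />
```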
@@ -21,15 +21,15 @@ export const brandButtonVariants = cva(
'underline-hover-red':
'opacity-75 underline border-transparent transition hover:text-white hover:bg-unraid-red hover:border-unraid-red focus:text-white focus:bg-unraid-red focus:border-unraid-red hover:opacity-100 focus:opacity-100',
white: 'text-black bg-white transition hover:bg-grey focus:bg-grey',
none: '',
none: 'border-transparent hover:shadow-none focus:shadow-none',
},
size: {
'12px': 'text-12px gap-4px',
'14px': 'text-14px gap-8px',
'16px': 'text-16px gap-8px',
'18px': 'text-18px gap-8px',
'20px': 'text-20px gap-8px',
'24px': 'text-24px gap-8px',
'12px': 'text-xs gap-1',
'14px': 'text-sm gap-2',
'16px': 'text-base gap-2',
'18px': 'text-lg gap-2',
'20px': 'text-xl gap-2',
'24px': 'text-2xl gap-2',
},
padding: {
default: '',
@@ -41,32 +41,32 @@
{
size: '12px',
padding: 'default',
class: 'p-8px',
class: 'p-2',
},
{
size: '14px',
padding: 'default',
class: 'p-8px',
class: 'p-2',
},
{
size: '16px',
padding: 'default',
class: 'p-12px',
class: 'p-3',
},
{
size: '18px',
padding: 'default',
class: 'p-12px',
class: 'p-3',
},
{
size: '20px',
padding: 'default',
class: 'p-16px',
class: 'p-4',
},
{
size: '24px',
padding: 'default',
class: 'p-16px',
class: 'p-4',
},
],
defaultVariants: {
@@ -23,12 +23,12 @@ const props = withDefaults(defineProps<BadgeProps>(), {

const badgeClasses = computed(() => {
const iconSizes = {
xs: 'w-12px',
sm: 'w-14px',
md: 'w-16px',
lg: 'w-18px',
xl: 'w-20px',
'2xl': 'w-24px',
xs: 'w-3',
sm: 'w-3.5',
md: 'w-4',
lg: 'w-4.5',
xl: 'w-5',
'2xl': 'w-6',
} as const;

return {
@@ -40,8 +40,8 @@ const badgeClasses = computed(() => {

<template>
<span :class="[badgeClasses.badge, props.class]">
<component :is="icon" v-if="icon" class="flex-shrink-0" :class="badgeClasses.icon" />
<component :is="icon" v-if="icon" class="shrink-0" :class="badgeClasses.icon" />
<slot />
<component :is="iconRight" v-if="iconRight" class="flex-shrink-0" :class="badgeClasses.icon" />
<component :is="iconRight" v-if="iconRight" class="shrink-0" :class="badgeClasses.icon" />
</span>
</template>
@@ -22,12 +22,12 @@ export const badgeVariants = cva(
custom: '',
},
size: {
xs: 'text-12px px-8px py-4px gap-4px',
sm: 'text-14px px-8px py-4px gap-8px',
md: 'text-16px px-12px py-8px gap-8px',
lg: 'text-18px px-12px py-8px gap-8px',
xl: 'text-20px px-16px py-12px gap-8px',
'2xl': 'text-24px px-16px py-12px gap-8px',
xs: 'text-xs px-2 py-1 gap-1',
sm: 'text-sm px-2 py-1 gap-2',
md: 'text-base px-3 py-2 gap-2',
lg: 'text-lg px-3 py-2 gap-2',
xl: 'text-xl px-4 py-3 gap-2',
'2xl': 'text-2xl px-4 py-3 gap-2',
},
},
defaultVariants: {
@@ -20,7 +20,7 @@ describe('Button', () => {
});

rerender({
props: { variant: 'outline' },
props: { variant: 'outline-solid' },
slots: { default: 'Delete' },
});
});
@@ -2,7 +2,7 @@ import type { VariantProps } from 'class-variance-authority';
import { cva } from 'class-variance-authority';

export const buttonVariants = cva(
'inline-flex items-center justify-center rounded-md text-base font-medium ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50',
'inline-flex items-center justify-center rounded-md text-base font-medium ring-offset-background transition-colors focus-visible:outline-hidden focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50',
{
variants: {
variant: {
Some files were not shown because too many files have changed in this diff