mirror of
https://github.com/unraid/api.git
synced 2026-01-02 22:50:02 -06:00
Compare commits
33 Commits
4.9.0-buil
...
feat/pnpm-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f3357ddc5a | ||
|
|
1a8da6d92b | ||
|
|
81808ada0f | ||
|
|
eecd9b1017 | ||
|
|
441e1805c1 | ||
|
|
29dcb7d0f0 | ||
|
|
1a7d35d3f6 | ||
|
|
af33e999a0 | ||
|
|
85a35804c1 | ||
|
|
a35c8ff2f1 | ||
|
|
153e7a1e3a | ||
|
|
e73fc356cb | ||
|
|
e1a7a3d22d | ||
|
|
53b05ebe5e | ||
|
|
2ed1308e40 | ||
|
|
6c03df2b97 | ||
|
|
074370c42c | ||
|
|
f34a33bc9f | ||
|
|
c7801a9236 | ||
|
|
dd759d9f0f | ||
|
|
74da8d81ef | ||
|
|
33e0b1ab24 | ||
|
|
ca4e2db1f2 | ||
|
|
ea20d1e211 | ||
|
|
79c57b8ed0 | ||
|
|
4168f43e3e | ||
|
|
20de3ec8d6 | ||
|
|
39b8f453da | ||
|
|
6bf3f77638 | ||
|
|
a79d049865 | ||
|
|
5b6bcb6043 | ||
|
|
6ee3cae962 | ||
|
|
f3671c3e07 |
@@ -2,7 +2,19 @@
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"Bash(rg:*)",
|
||||
"Bash(find:*)"
|
||||
"Bash(find:*)",
|
||||
"Bash(pnpm codegen:*)",
|
||||
"Bash(pnpm dev:*)",
|
||||
"Bash(pnpm build:*)",
|
||||
"Bash(pnpm test:*)",
|
||||
"Bash(grep:*)",
|
||||
"Bash(pnpm type-check:*)",
|
||||
"Bash(pnpm lint:*)",
|
||||
"Bash(pnpm --filter ./api lint)",
|
||||
"Bash(mv:*)",
|
||||
"Bash(ls:*)",
|
||||
"mcp__ide__getDiagnostics",
|
||||
"Bash(pnpm --filter \"*connect*\" test connect-status-writer.service.spec)"
|
||||
]
|
||||
},
|
||||
"enableAllProjectMcpServers": false
|
||||
|
||||
4
.github/workflows/deploy-storybook.yml
vendored
4
.github/workflows/deploy-storybook.yml
vendored
@@ -25,7 +25,7 @@ jobs:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
node-version: '22.17.0'
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
@@ -33,7 +33,7 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.4.3
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.1
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
|
||||
version: 1.0
|
||||
|
||||
6
.github/workflows/main.yml
vendored
6
.github/workflows/main.yml
vendored
@@ -45,7 +45,7 @@ jobs:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.4.3
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.1
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
|
||||
version: 1.0
|
||||
@@ -190,7 +190,7 @@ jobs:
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.4.3
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.1
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential
|
||||
version: 1.0
|
||||
@@ -267,7 +267,7 @@ jobs:
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.4.3
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.1
|
||||
with:
|
||||
packages: bash procps python3 libvirt-dev jq zstd git build-essential
|
||||
version: 1.0
|
||||
|
||||
4
.github/workflows/release-production.yml
vendored
4
.github/workflows/release-production.yml
vendored
@@ -32,7 +32,9 @@ jobs:
|
||||
with:
|
||||
node-version: '22.17.0'
|
||||
- run: |
|
||||
echo '${{ steps.release-info.outputs.body }}' >> release-notes.txt
|
||||
cat << 'EOF' > release-notes.txt
|
||||
${{ steps.release-info.outputs.body }}
|
||||
EOF
|
||||
- run: npm install html-escaper@2 xml2js
|
||||
- name: Update Plugin Changelog
|
||||
uses: actions/github-script@v7
|
||||
|
||||
2
.github/workflows/test-libvirt.yml
vendored
2
.github/workflows/test-libvirt.yml
vendored
@@ -31,7 +31,7 @@ jobs:
|
||||
python-version: "3.13.5"
|
||||
|
||||
- name: Cache APT Packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.4.3
|
||||
uses: awalsh128/cache-apt-pkgs-action@v1.5.1
|
||||
with:
|
||||
packages: libvirt-dev
|
||||
version: 1.0
|
||||
|
||||
@@ -1 +1 @@
|
||||
{".":"4.9.0"}
|
||||
{".":"4.10.0"}
|
||||
|
||||
@@ -1,5 +1,63 @@
|
||||
# Changelog
|
||||
|
||||
## [4.10.0](https://github.com/unraid/api/compare/v4.9.5...v4.10.0) (2025-07-15)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* trial extension allowed within 5 days of expiration ([#1490](https://github.com/unraid/api/issues/1490)) ([f34a33b](https://github.com/unraid/api/commit/f34a33bc9f1a7e135d453d9d31888789bfc3f878))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* delay `nginx:reload` file mod effect by 10 seconds ([#1512](https://github.com/unraid/api/issues/1512)) ([af33e99](https://github.com/unraid/api/commit/af33e999a0480a77e3e6b2aa833b17b38b835656))
|
||||
* **deps:** update all non-major dependencies ([#1489](https://github.com/unraid/api/issues/1489)) ([53b05eb](https://github.com/unraid/api/commit/53b05ebe5e2050cb0916fcd65e8d41370aee0624))
|
||||
* ensure no crash if emhttp state configs are missing ([#1514](https://github.com/unraid/api/issues/1514)) ([1a7d35d](https://github.com/unraid/api/commit/1a7d35d3f6972fd8aff58c17b2b0fb79725e660e))
|
||||
* **my.servers:** improve DNS resolution robustness for backup server ([#1518](https://github.com/unraid/api/issues/1518)) ([eecd9b1](https://github.com/unraid/api/commit/eecd9b1017a63651d1dc782feaa224111cdee8b6))
|
||||
* over-eager cloud query from web components ([#1506](https://github.com/unraid/api/issues/1506)) ([074370c](https://github.com/unraid/api/commit/074370c42cdecc4dbc58193ff518aa25735c56b3))
|
||||
* replace myservers.cfg reads in UpdateFlashBackup.php ([#1517](https://github.com/unraid/api/issues/1517)) ([441e180](https://github.com/unraid/api/commit/441e1805c108a6c1cd35ee093246b975a03f8474))
|
||||
* rm short-circuit in `rc.unraid-api` if plugin config dir is absent ([#1515](https://github.com/unraid/api/issues/1515)) ([29dcb7d](https://github.com/unraid/api/commit/29dcb7d0f088937cefc5158055f48680e86e5c36))
|
||||
|
||||
## [4.9.5](https://github.com/unraid/api/compare/v4.9.4...v4.9.5) (2025-07-10)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **connect:** rm eager restart on `ERROR_RETYING` connection status ([#1502](https://github.com/unraid/api/issues/1502)) ([dd759d9](https://github.com/unraid/api/commit/dd759d9f0f841b296f8083bc67c6cd3f7a69aa5b))
|
||||
|
||||
## [4.9.4](https://github.com/unraid/api/compare/v4.9.3...v4.9.4) (2025-07-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* backport `<unraid-modals>` upon plg install when necessary ([#1499](https://github.com/unraid/api/issues/1499)) ([33e0b1a](https://github.com/unraid/api/commit/33e0b1ab24bedb6a2c7b376ea73dbe65bc3044be))
|
||||
* DefaultPageLayout patch rollback omits legacy header logo ([#1497](https://github.com/unraid/api/issues/1497)) ([ea20d1e](https://github.com/unraid/api/commit/ea20d1e2116fcafa154090fee78b42ec5d9ba584))
|
||||
* event emitter setup for writing status ([#1496](https://github.com/unraid/api/issues/1496)) ([ca4e2db](https://github.com/unraid/api/commit/ca4e2db1f29126a1fa3784af563832edda64b0ca))
|
||||
|
||||
## [4.9.3](https://github.com/unraid/api/compare/v4.9.2...v4.9.3) (2025-07-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* duplicated header logo after api stops ([#1493](https://github.com/unraid/api/issues/1493)) ([4168f43](https://github.com/unraid/api/commit/4168f43e3ecd51479bec3aae585abbe6dcd3e416))
|
||||
|
||||
## [4.9.2](https://github.com/unraid/api/compare/v4.9.1...v4.9.2) (2025-07-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* invalid configs no longer crash API ([#1491](https://github.com/unraid/api/issues/1491)) ([6bf3f77](https://github.com/unraid/api/commit/6bf3f776380edeff5133517e6aca223556e30144))
|
||||
* invalid state for unraid plugin ([#1492](https://github.com/unraid/api/issues/1492)) ([39b8f45](https://github.com/unraid/api/commit/39b8f453da23793ef51f8e7f7196370aada8c5aa))
|
||||
* release note escaping ([5b6bcb6](https://github.com/unraid/api/commit/5b6bcb6043a5269bff4dc28714d787a5a3f07e22))
|
||||
|
||||
## [4.9.1](https://github.com/unraid/api/compare/v4.9.0...v4.9.1) (2025-07-08)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **HeaderOsVersion:** adjust top margin for header component ([#1485](https://github.com/unraid/api/issues/1485)) ([862b54d](https://github.com/unraid/api/commit/862b54de8cd793606f1d29e76c19d4a0e1ae172f))
|
||||
* sign out doesn't work ([#1486](https://github.com/unraid/api/issues/1486)) ([f3671c3](https://github.com/unraid/api/commit/f3671c3e0750b79be1f19655a07a0e9932289b3f))
|
||||
|
||||
## [4.9.0](https://github.com/unraid/api/compare/v4.8.0...v4.9.0) (2025-07-08)
|
||||
|
||||
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
{
|
||||
"version": "4.8.0",
|
||||
"version": "4.9.5",
|
||||
"extraOrigins": [
|
||||
"https://google.com",
|
||||
"https://test.com"
|
||||
],
|
||||
"sandbox": true,
|
||||
"ssoSubIds": [],
|
||||
"plugins": ["unraid-api-plugin-connect"]
|
||||
"plugins": [
|
||||
"unraid-api-plugin-connect"
|
||||
]
|
||||
}
|
||||
@@ -247,347 +247,6 @@ A field whose value conforms to the standard URL format as specified in RFC3986:
|
||||
"""
|
||||
scalar URL
|
||||
|
||||
type DiskPartition {
|
||||
"""The name of the partition"""
|
||||
name: String!
|
||||
|
||||
"""The filesystem type of the partition"""
|
||||
fsType: DiskFsType!
|
||||
|
||||
"""The size of the partition in bytes"""
|
||||
size: Float!
|
||||
}
|
||||
|
||||
"""The type of filesystem on the disk partition"""
|
||||
enum DiskFsType {
|
||||
XFS
|
||||
BTRFS
|
||||
VFAT
|
||||
ZFS
|
||||
EXT4
|
||||
NTFS
|
||||
}
|
||||
|
||||
type Disk implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""The device path of the disk (e.g. /dev/sdb)"""
|
||||
device: String!
|
||||
|
||||
"""The type of disk (e.g. SSD, HDD)"""
|
||||
type: String!
|
||||
|
||||
"""The model name of the disk"""
|
||||
name: String!
|
||||
|
||||
"""The manufacturer of the disk"""
|
||||
vendor: String!
|
||||
|
||||
"""The total size of the disk in bytes"""
|
||||
size: Float!
|
||||
|
||||
"""The number of bytes per sector"""
|
||||
bytesPerSector: Float!
|
||||
|
||||
"""The total number of cylinders on the disk"""
|
||||
totalCylinders: Float!
|
||||
|
||||
"""The total number of heads on the disk"""
|
||||
totalHeads: Float!
|
||||
|
||||
"""The total number of sectors on the disk"""
|
||||
totalSectors: Float!
|
||||
|
||||
"""The total number of tracks on the disk"""
|
||||
totalTracks: Float!
|
||||
|
||||
"""The number of tracks per cylinder"""
|
||||
tracksPerCylinder: Float!
|
||||
|
||||
"""The number of sectors per track"""
|
||||
sectorsPerTrack: Float!
|
||||
|
||||
"""The firmware revision of the disk"""
|
||||
firmwareRevision: String!
|
||||
|
||||
"""The serial number of the disk"""
|
||||
serialNum: String!
|
||||
|
||||
"""The interface type of the disk"""
|
||||
interfaceType: DiskInterfaceType!
|
||||
|
||||
"""The SMART status of the disk"""
|
||||
smartStatus: DiskSmartStatus!
|
||||
|
||||
"""The current temperature of the disk in Celsius"""
|
||||
temperature: Float
|
||||
|
||||
"""The partitions on the disk"""
|
||||
partitions: [DiskPartition!]!
|
||||
}
|
||||
|
||||
"""The type of interface the disk uses to connect to the system"""
|
||||
enum DiskInterfaceType {
|
||||
SAS
|
||||
SATA
|
||||
USB
|
||||
PCIE
|
||||
UNKNOWN
|
||||
}
|
||||
|
||||
"""
|
||||
The SMART (Self-Monitoring, Analysis and Reporting Technology) status of the disk
|
||||
"""
|
||||
enum DiskSmartStatus {
|
||||
OK
|
||||
UNKNOWN
|
||||
}
|
||||
|
||||
type KeyFile {
|
||||
location: String
|
||||
contents: String
|
||||
}
|
||||
|
||||
type Registration implements Node {
|
||||
id: PrefixedID!
|
||||
type: registrationType
|
||||
keyFile: KeyFile
|
||||
state: RegistrationState
|
||||
expiration: String
|
||||
updateExpiration: String
|
||||
}
|
||||
|
||||
enum registrationType {
|
||||
BASIC
|
||||
PLUS
|
||||
PRO
|
||||
STARTER
|
||||
UNLEASHED
|
||||
LIFETIME
|
||||
INVALID
|
||||
TRIAL
|
||||
}
|
||||
|
||||
enum RegistrationState {
|
||||
TRIAL
|
||||
BASIC
|
||||
PLUS
|
||||
PRO
|
||||
STARTER
|
||||
UNLEASHED
|
||||
LIFETIME
|
||||
EEXPIRED
|
||||
EGUID
|
||||
EGUID1
|
||||
ETRIAL
|
||||
ENOKEYFILE
|
||||
ENOKEYFILE1
|
||||
ENOKEYFILE2
|
||||
ENOFLASH
|
||||
ENOFLASH1
|
||||
ENOFLASH2
|
||||
ENOFLASH3
|
||||
ENOFLASH4
|
||||
ENOFLASH5
|
||||
ENOFLASH6
|
||||
ENOFLASH7
|
||||
EBLACKLISTED
|
||||
EBLACKLISTED1
|
||||
EBLACKLISTED2
|
||||
ENOCONN
|
||||
}
|
||||
|
||||
type Vars implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""Unraid version"""
|
||||
version: String
|
||||
maxArraysz: Int
|
||||
maxCachesz: Int
|
||||
|
||||
"""Machine hostname"""
|
||||
name: String
|
||||
timeZone: String
|
||||
comment: String
|
||||
security: String
|
||||
workgroup: String
|
||||
domain: String
|
||||
domainShort: String
|
||||
hideDotFiles: Boolean
|
||||
localMaster: Boolean
|
||||
enableFruit: String
|
||||
|
||||
"""Should a NTP server be used for time sync?"""
|
||||
useNtp: Boolean
|
||||
|
||||
"""NTP Server 1"""
|
||||
ntpServer1: String
|
||||
|
||||
"""NTP Server 2"""
|
||||
ntpServer2: String
|
||||
|
||||
"""NTP Server 3"""
|
||||
ntpServer3: String
|
||||
|
||||
"""NTP Server 4"""
|
||||
ntpServer4: String
|
||||
domainLogin: String
|
||||
sysModel: String
|
||||
sysArraySlots: Int
|
||||
sysCacheSlots: Int
|
||||
sysFlashSlots: Int
|
||||
useSsl: Boolean
|
||||
|
||||
"""Port for the webui via HTTP"""
|
||||
port: Int
|
||||
|
||||
"""Port for the webui via HTTPS"""
|
||||
portssl: Int
|
||||
localTld: String
|
||||
bindMgt: Boolean
|
||||
|
||||
"""Should telnet be enabled?"""
|
||||
useTelnet: Boolean
|
||||
porttelnet: Int
|
||||
useSsh: Boolean
|
||||
portssh: Int
|
||||
startPage: String
|
||||
startArray: Boolean
|
||||
spindownDelay: String
|
||||
queueDepth: String
|
||||
spinupGroups: Boolean
|
||||
defaultFormat: String
|
||||
defaultFsType: String
|
||||
shutdownTimeout: Int
|
||||
luksKeyfile: String
|
||||
pollAttributes: String
|
||||
pollAttributesDefault: String
|
||||
pollAttributesStatus: String
|
||||
nrRequests: Int
|
||||
nrRequestsDefault: Int
|
||||
nrRequestsStatus: String
|
||||
mdNumStripes: Int
|
||||
mdNumStripesDefault: Int
|
||||
mdNumStripesStatus: String
|
||||
mdSyncWindow: Int
|
||||
mdSyncWindowDefault: Int
|
||||
mdSyncWindowStatus: String
|
||||
mdSyncThresh: Int
|
||||
mdSyncThreshDefault: Int
|
||||
mdSyncThreshStatus: String
|
||||
mdWriteMethod: Int
|
||||
mdWriteMethodDefault: String
|
||||
mdWriteMethodStatus: String
|
||||
shareDisk: String
|
||||
shareUser: String
|
||||
shareUserInclude: String
|
||||
shareUserExclude: String
|
||||
shareSmbEnabled: Boolean
|
||||
shareNfsEnabled: Boolean
|
||||
shareAfpEnabled: Boolean
|
||||
shareInitialOwner: String
|
||||
shareInitialGroup: String
|
||||
shareCacheEnabled: Boolean
|
||||
shareCacheFloor: String
|
||||
shareMoverSchedule: String
|
||||
shareMoverLogging: Boolean
|
||||
fuseRemember: String
|
||||
fuseRememberDefault: String
|
||||
fuseRememberStatus: String
|
||||
fuseDirectio: String
|
||||
fuseDirectioDefault: String
|
||||
fuseDirectioStatus: String
|
||||
shareAvahiEnabled: Boolean
|
||||
shareAvahiSmbName: String
|
||||
shareAvahiSmbModel: String
|
||||
shareAvahiAfpName: String
|
||||
shareAvahiAfpModel: String
|
||||
safeMode: Boolean
|
||||
startMode: String
|
||||
configValid: Boolean
|
||||
configError: ConfigErrorState
|
||||
joinStatus: String
|
||||
deviceCount: Int
|
||||
flashGuid: String
|
||||
flashProduct: String
|
||||
flashVendor: String
|
||||
regCheck: String
|
||||
regFile: String
|
||||
regGuid: String
|
||||
regTy: registrationType
|
||||
regState: RegistrationState
|
||||
|
||||
"""Registration owner"""
|
||||
regTo: String
|
||||
regTm: String
|
||||
regTm2: String
|
||||
regGen: String
|
||||
sbName: String
|
||||
sbVersion: String
|
||||
sbUpdated: String
|
||||
sbEvents: Int
|
||||
sbState: String
|
||||
sbClean: Boolean
|
||||
sbSynced: Int
|
||||
sbSyncErrs: Int
|
||||
sbSynced2: Int
|
||||
sbSyncExit: String
|
||||
sbNumDisks: Int
|
||||
mdColor: String
|
||||
mdNumDisks: Int
|
||||
mdNumDisabled: Int
|
||||
mdNumInvalid: Int
|
||||
mdNumMissing: Int
|
||||
mdNumNew: Int
|
||||
mdNumErased: Int
|
||||
mdResync: Int
|
||||
mdResyncCorr: String
|
||||
mdResyncPos: String
|
||||
mdResyncDb: String
|
||||
mdResyncDt: String
|
||||
mdResyncAction: String
|
||||
mdResyncSize: Int
|
||||
mdState: String
|
||||
mdVersion: String
|
||||
cacheNumDevices: Int
|
||||
cacheSbNumDisks: Int
|
||||
fsState: String
|
||||
|
||||
"""Human friendly string of array events happening"""
|
||||
fsProgress: String
|
||||
|
||||
"""
|
||||
Percentage from 0 - 100 while upgrading a disk or swapping parity drives
|
||||
"""
|
||||
fsCopyPrcnt: Int
|
||||
fsNumMounted: Int
|
||||
fsNumUnmountable: Int
|
||||
fsUnmountableMask: String
|
||||
|
||||
"""Total amount of user shares"""
|
||||
shareCount: Int
|
||||
|
||||
"""Total amount shares with SMB enabled"""
|
||||
shareSmbCount: Int
|
||||
|
||||
"""Total amount shares with NFS enabled"""
|
||||
shareNfsCount: Int
|
||||
|
||||
"""Total amount shares with AFP enabled"""
|
||||
shareAfpCount: Int
|
||||
shareMoverActive: Boolean
|
||||
csrfToken: String
|
||||
}
|
||||
|
||||
"""Possible error states for configuration"""
|
||||
enum ConfigErrorState {
|
||||
UNKNOWN_ERROR
|
||||
INELIGIBLE
|
||||
INVALID
|
||||
NO_KEY_SERVER
|
||||
WITHDRAWN
|
||||
}
|
||||
|
||||
type Permission {
|
||||
resource: Resource!
|
||||
actions: [String!]!
|
||||
@@ -961,6 +620,102 @@ enum ThemeName {
|
||||
white
|
||||
}
|
||||
|
||||
type DiskPartition {
|
||||
"""The name of the partition"""
|
||||
name: String!
|
||||
|
||||
"""The filesystem type of the partition"""
|
||||
fsType: DiskFsType!
|
||||
|
||||
"""The size of the partition in bytes"""
|
||||
size: Float!
|
||||
}
|
||||
|
||||
"""The type of filesystem on the disk partition"""
|
||||
enum DiskFsType {
|
||||
XFS
|
||||
BTRFS
|
||||
VFAT
|
||||
ZFS
|
||||
EXT4
|
||||
NTFS
|
||||
}
|
||||
|
||||
type Disk implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""The device path of the disk (e.g. /dev/sdb)"""
|
||||
device: String!
|
||||
|
||||
"""The type of disk (e.g. SSD, HDD)"""
|
||||
type: String!
|
||||
|
||||
"""The model name of the disk"""
|
||||
name: String!
|
||||
|
||||
"""The manufacturer of the disk"""
|
||||
vendor: String!
|
||||
|
||||
"""The total size of the disk in bytes"""
|
||||
size: Float!
|
||||
|
||||
"""The number of bytes per sector"""
|
||||
bytesPerSector: Float!
|
||||
|
||||
"""The total number of cylinders on the disk"""
|
||||
totalCylinders: Float!
|
||||
|
||||
"""The total number of heads on the disk"""
|
||||
totalHeads: Float!
|
||||
|
||||
"""The total number of sectors on the disk"""
|
||||
totalSectors: Float!
|
||||
|
||||
"""The total number of tracks on the disk"""
|
||||
totalTracks: Float!
|
||||
|
||||
"""The number of tracks per cylinder"""
|
||||
tracksPerCylinder: Float!
|
||||
|
||||
"""The number of sectors per track"""
|
||||
sectorsPerTrack: Float!
|
||||
|
||||
"""The firmware revision of the disk"""
|
||||
firmwareRevision: String!
|
||||
|
||||
"""The serial number of the disk"""
|
||||
serialNum: String!
|
||||
|
||||
"""The interface type of the disk"""
|
||||
interfaceType: DiskInterfaceType!
|
||||
|
||||
"""The SMART status of the disk"""
|
||||
smartStatus: DiskSmartStatus!
|
||||
|
||||
"""The current temperature of the disk in Celsius"""
|
||||
temperature: Float
|
||||
|
||||
"""The partitions on the disk"""
|
||||
partitions: [DiskPartition!]!
|
||||
}
|
||||
|
||||
"""The type of interface the disk uses to connect to the system"""
|
||||
enum DiskInterfaceType {
|
||||
SAS
|
||||
SATA
|
||||
USB
|
||||
PCIE
|
||||
UNKNOWN
|
||||
}
|
||||
|
||||
"""
|
||||
The SMART (Self-Monitoring, Analysis and Reporting Technology) status of the disk
|
||||
"""
|
||||
enum DiskSmartStatus {
|
||||
OK
|
||||
UNKNOWN
|
||||
}
|
||||
|
||||
type InfoApps implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
@@ -1351,6 +1106,60 @@ type Owner {
|
||||
avatar: String!
|
||||
}
|
||||
|
||||
type KeyFile {
|
||||
location: String
|
||||
contents: String
|
||||
}
|
||||
|
||||
type Registration implements Node {
|
||||
id: PrefixedID!
|
||||
type: registrationType
|
||||
keyFile: KeyFile
|
||||
state: RegistrationState
|
||||
expiration: String
|
||||
updateExpiration: String
|
||||
}
|
||||
|
||||
enum registrationType {
|
||||
BASIC
|
||||
PLUS
|
||||
PRO
|
||||
STARTER
|
||||
UNLEASHED
|
||||
LIFETIME
|
||||
INVALID
|
||||
TRIAL
|
||||
}
|
||||
|
||||
enum RegistrationState {
|
||||
TRIAL
|
||||
BASIC
|
||||
PLUS
|
||||
PRO
|
||||
STARTER
|
||||
UNLEASHED
|
||||
LIFETIME
|
||||
EEXPIRED
|
||||
EGUID
|
||||
EGUID1
|
||||
ETRIAL
|
||||
ENOKEYFILE
|
||||
ENOKEYFILE1
|
||||
ENOKEYFILE2
|
||||
ENOFLASH
|
||||
ENOFLASH1
|
||||
ENOFLASH2
|
||||
ENOFLASH3
|
||||
ENOFLASH4
|
||||
ENOFLASH5
|
||||
ENOFLASH6
|
||||
ENOFLASH7
|
||||
EBLACKLISTED
|
||||
EBLACKLISTED1
|
||||
EBLACKLISTED2
|
||||
ENOCONN
|
||||
}
|
||||
|
||||
type ProfileModel implements Node {
|
||||
id: PrefixedID!
|
||||
username: String!
|
||||
@@ -1416,6 +1225,197 @@ type Settings implements Node {
|
||||
api: ApiConfig!
|
||||
}
|
||||
|
||||
type Vars implements Node {
|
||||
id: PrefixedID!
|
||||
|
||||
"""Unraid version"""
|
||||
version: String
|
||||
maxArraysz: Int
|
||||
maxCachesz: Int
|
||||
|
||||
"""Machine hostname"""
|
||||
name: String
|
||||
timeZone: String
|
||||
comment: String
|
||||
security: String
|
||||
workgroup: String
|
||||
domain: String
|
||||
domainShort: String
|
||||
hideDotFiles: Boolean
|
||||
localMaster: Boolean
|
||||
enableFruit: String
|
||||
|
||||
"""Should a NTP server be used for time sync?"""
|
||||
useNtp: Boolean
|
||||
|
||||
"""NTP Server 1"""
|
||||
ntpServer1: String
|
||||
|
||||
"""NTP Server 2"""
|
||||
ntpServer2: String
|
||||
|
||||
"""NTP Server 3"""
|
||||
ntpServer3: String
|
||||
|
||||
"""NTP Server 4"""
|
||||
ntpServer4: String
|
||||
domainLogin: String
|
||||
sysModel: String
|
||||
sysArraySlots: Int
|
||||
sysCacheSlots: Int
|
||||
sysFlashSlots: Int
|
||||
useSsl: Boolean
|
||||
|
||||
"""Port for the webui via HTTP"""
|
||||
port: Int
|
||||
|
||||
"""Port for the webui via HTTPS"""
|
||||
portssl: Int
|
||||
localTld: String
|
||||
bindMgt: Boolean
|
||||
|
||||
"""Should telnet be enabled?"""
|
||||
useTelnet: Boolean
|
||||
porttelnet: Int
|
||||
useSsh: Boolean
|
||||
portssh: Int
|
||||
startPage: String
|
||||
startArray: Boolean
|
||||
spindownDelay: String
|
||||
queueDepth: String
|
||||
spinupGroups: Boolean
|
||||
defaultFormat: String
|
||||
defaultFsType: String
|
||||
shutdownTimeout: Int
|
||||
luksKeyfile: String
|
||||
pollAttributes: String
|
||||
pollAttributesDefault: String
|
||||
pollAttributesStatus: String
|
||||
nrRequests: Int
|
||||
nrRequestsDefault: Int
|
||||
nrRequestsStatus: String
|
||||
mdNumStripes: Int
|
||||
mdNumStripesDefault: Int
|
||||
mdNumStripesStatus: String
|
||||
mdSyncWindow: Int
|
||||
mdSyncWindowDefault: Int
|
||||
mdSyncWindowStatus: String
|
||||
mdSyncThresh: Int
|
||||
mdSyncThreshDefault: Int
|
||||
mdSyncThreshStatus: String
|
||||
mdWriteMethod: Int
|
||||
mdWriteMethodDefault: String
|
||||
mdWriteMethodStatus: String
|
||||
shareDisk: String
|
||||
shareUser: String
|
||||
shareUserInclude: String
|
||||
shareUserExclude: String
|
||||
shareSmbEnabled: Boolean
|
||||
shareNfsEnabled: Boolean
|
||||
shareAfpEnabled: Boolean
|
||||
shareInitialOwner: String
|
||||
shareInitialGroup: String
|
||||
shareCacheEnabled: Boolean
|
||||
shareCacheFloor: String
|
||||
shareMoverSchedule: String
|
||||
shareMoverLogging: Boolean
|
||||
fuseRemember: String
|
||||
fuseRememberDefault: String
|
||||
fuseRememberStatus: String
|
||||
fuseDirectio: String
|
||||
fuseDirectioDefault: String
|
||||
fuseDirectioStatus: String
|
||||
shareAvahiEnabled: Boolean
|
||||
shareAvahiSmbName: String
|
||||
shareAvahiSmbModel: String
|
||||
shareAvahiAfpName: String
|
||||
shareAvahiAfpModel: String
|
||||
safeMode: Boolean
|
||||
startMode: String
|
||||
configValid: Boolean
|
||||
configError: ConfigErrorState
|
||||
joinStatus: String
|
||||
deviceCount: Int
|
||||
flashGuid: String
|
||||
flashProduct: String
|
||||
flashVendor: String
|
||||
regCheck: String
|
||||
regFile: String
|
||||
regGuid: String
|
||||
regTy: registrationType
|
||||
regState: RegistrationState
|
||||
|
||||
"""Registration owner"""
|
||||
regTo: String
|
||||
regTm: String
|
||||
regTm2: String
|
||||
regGen: String
|
||||
sbName: String
|
||||
sbVersion: String
|
||||
sbUpdated: String
|
||||
sbEvents: Int
|
||||
sbState: String
|
||||
sbClean: Boolean
|
||||
sbSynced: Int
|
||||
sbSyncErrs: Int
|
||||
sbSynced2: Int
|
||||
sbSyncExit: String
|
||||
sbNumDisks: Int
|
||||
mdColor: String
|
||||
mdNumDisks: Int
|
||||
mdNumDisabled: Int
|
||||
mdNumInvalid: Int
|
||||
mdNumMissing: Int
|
||||
mdNumNew: Int
|
||||
mdNumErased: Int
|
||||
mdResync: Int
|
||||
mdResyncCorr: String
|
||||
mdResyncPos: String
|
||||
mdResyncDb: String
|
||||
mdResyncDt: String
|
||||
mdResyncAction: String
|
||||
mdResyncSize: Int
|
||||
mdState: String
|
||||
mdVersion: String
|
||||
cacheNumDevices: Int
|
||||
cacheSbNumDisks: Int
|
||||
fsState: String
|
||||
|
||||
"""Human friendly string of array events happening"""
|
||||
fsProgress: String
|
||||
|
||||
"""
|
||||
Percentage from 0 - 100 while upgrading a disk or swapping parity drives
|
||||
"""
|
||||
fsCopyPrcnt: Int
|
||||
fsNumMounted: Int
|
||||
fsNumUnmountable: Int
|
||||
fsUnmountableMask: String
|
||||
|
||||
"""Total amount of user shares"""
|
||||
shareCount: Int
|
||||
|
||||
"""Total amount shares with SMB enabled"""
|
||||
shareSmbCount: Int
|
||||
|
||||
"""Total amount shares with NFS enabled"""
|
||||
shareNfsCount: Int
|
||||
|
||||
"""Total amount shares with AFP enabled"""
|
||||
shareAfpCount: Int
|
||||
shareMoverActive: Boolean
|
||||
csrfToken: String
|
||||
}
|
||||
|
||||
"""Possible error states for configuration"""
|
||||
enum ConfigErrorState {
|
||||
UNKNOWN_ERROR
|
||||
INELIGIBLE
|
||||
INVALID
|
||||
NO_KEY_SERVER
|
||||
WITHDRAWN
|
||||
}
|
||||
|
||||
type VmDomain implements Node {
|
||||
"""The unique identifier for the vm (uuid)"""
|
||||
id: PrefixedID!
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@unraid/api",
|
||||
"version": "4.9.0",
|
||||
"version": "4.10.0",
|
||||
"main": "src/cli/index.ts",
|
||||
"type": "module",
|
||||
"corepack": {
|
||||
@@ -10,7 +10,7 @@
|
||||
"author": "Lime Technology, Inc. <unraid.net>",
|
||||
"license": "GPL-2.0-or-later",
|
||||
"engines": {
|
||||
"pnpm": "10.12.4"
|
||||
"pnpm": "10.13.1"
|
||||
},
|
||||
"scripts": {
|
||||
"// Development": "",
|
||||
@@ -57,7 +57,7 @@
|
||||
"@as-integrations/fastify": "2.1.1",
|
||||
"@fastify/cookie": "11.0.2",
|
||||
"@fastify/helmet": "13.0.1",
|
||||
"@graphql-codegen/client-preset": "4.8.2",
|
||||
"@graphql-codegen/client-preset": "4.8.3",
|
||||
"@graphql-tools/load-files": "7.0.1",
|
||||
"@graphql-tools/merge": "9.0.24",
|
||||
"@graphql-tools/schema": "10.0.23",
|
||||
@@ -82,7 +82,7 @@
|
||||
"accesscontrol": "2.2.1",
|
||||
"bycontract": "2.0.11",
|
||||
"bytes": "3.1.2",
|
||||
"cache-manager": "7.0.0",
|
||||
"cache-manager": "7.0.1",
|
||||
"cacheable-lookup": "7.0.0",
|
||||
"camelcase-keys": "9.1.3",
|
||||
"casbin": "5.38.0",
|
||||
@@ -94,11 +94,11 @@
|
||||
"command-exists": "1.2.9",
|
||||
"convert": "5.12.0",
|
||||
"cookie": "1.0.2",
|
||||
"cron": "4.3.1",
|
||||
"cron": "4.3.2",
|
||||
"cross-fetch": "4.1.0",
|
||||
"diff": "8.0.2",
|
||||
"dockerode": "4.0.7",
|
||||
"dotenv": "17.1.0",
|
||||
"dotenv": "17.2.0",
|
||||
"execa": "9.6.0",
|
||||
"exit-hook": "4.0.0",
|
||||
"fastify": "5.4.0",
|
||||
@@ -112,7 +112,7 @@
|
||||
"graphql-scalars": "1.24.2",
|
||||
"graphql-subscriptions": "3.0.0",
|
||||
"graphql-tag": "2.12.6",
|
||||
"graphql-ws": "6.0.5",
|
||||
"graphql-ws": "6.0.6",
|
||||
"ini": "5.0.0",
|
||||
"ip": "2.0.1",
|
||||
"jose": "6.0.11",
|
||||
@@ -138,11 +138,11 @@
|
||||
"rxjs": "7.8.2",
|
||||
"semver": "7.7.2",
|
||||
"strftime": "0.10.3",
|
||||
"systeminformation": "5.27.6",
|
||||
"systeminformation": "5.27.7",
|
||||
"uuid": "11.1.0",
|
||||
"ws": "8.18.2",
|
||||
"ws": "8.18.3",
|
||||
"zen-observable-ts": "1.1.0",
|
||||
"zod": "3.25.67"
|
||||
"zod": "3.25.76"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"unraid-api-plugin-connect": "workspace:*"
|
||||
@@ -153,35 +153,35 @@
|
||||
}
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "9.29.0",
|
||||
"@eslint/js": "9.31.0",
|
||||
"@graphql-codegen/add": "5.0.3",
|
||||
"@graphql-codegen/cli": "5.0.7",
|
||||
"@graphql-codegen/fragment-matcher": "5.1.0",
|
||||
"@graphql-codegen/import-types-preset": "3.0.1",
|
||||
"@graphql-codegen/typed-document-node": "5.1.1",
|
||||
"@graphql-codegen/typed-document-node": "5.1.2",
|
||||
"@graphql-codegen/typescript": "4.1.6",
|
||||
"@graphql-codegen/typescript-operations": "4.6.1",
|
||||
"@graphql-codegen/typescript-resolvers": "4.5.1",
|
||||
"@graphql-typed-document-node/core": "3.2.0",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.4.2",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.5.1",
|
||||
"@nestjs/testing": "11.1.3",
|
||||
"@originjs/vite-plugin-commonjs": "1.0.3",
|
||||
"@rollup/plugin-node-resolve": "16.0.1",
|
||||
"@swc/core": "1.12.4",
|
||||
"@swc/core": "1.12.14",
|
||||
"@types/async-exit-hook": "2.0.2",
|
||||
"@types/bytes": "3.1.5",
|
||||
"@types/cli-table": "0.3.4",
|
||||
"@types/command-exists": "1.2.3",
|
||||
"@types/cors": "2.8.19",
|
||||
"@types/dockerode": "3.3.41",
|
||||
"@types/dockerode": "3.3.42",
|
||||
"@types/graphql-fields": "1.3.9",
|
||||
"@types/graphql-type-uuid": "0.2.6",
|
||||
"@types/ini": "4.1.1",
|
||||
"@types/ip": "1.1.3",
|
||||
"@types/lodash": "4.17.18",
|
||||
"@types/lodash": "4.17.20",
|
||||
"@types/lodash-es": "4.17.12",
|
||||
"@types/mustache": "4.2.6",
|
||||
"@types/node": "22.15.32",
|
||||
"@types/node": "22.16.4",
|
||||
"@types/pify": "6.1.0",
|
||||
"@types/semver": "7.7.0",
|
||||
"@types/sendmail": "1.4.7",
|
||||
@@ -193,27 +193,27 @@
|
||||
"@vitest/coverage-v8": "3.2.4",
|
||||
"@vitest/ui": "3.2.4",
|
||||
"cz-conventional-changelog": "3.3.0",
|
||||
"eslint": "9.29.0",
|
||||
"eslint-plugin-import": "2.31.0",
|
||||
"eslint-plugin-n": "17.20.0",
|
||||
"eslint": "9.31.0",
|
||||
"eslint-plugin-import": "2.32.0",
|
||||
"eslint-plugin-n": "17.21.0",
|
||||
"eslint-plugin-no-relative-import-paths": "1.6.1",
|
||||
"eslint-plugin-prettier": "5.5.0",
|
||||
"eslint-plugin-prettier": "5.5.1",
|
||||
"graphql-codegen-typescript-validation-schema": "0.17.1",
|
||||
"jiti": "2.4.2",
|
||||
"nodemon": "3.1.10",
|
||||
"prettier": "3.5.3",
|
||||
"prettier": "3.6.2",
|
||||
"rollup-plugin-node-externals": "8.0.1",
|
||||
"commit-and-tag-version": "9.5.0",
|
||||
"commit-and-tag-version": "9.6.0",
|
||||
"tsx": "4.20.3",
|
||||
"type-fest": "4.41.0",
|
||||
"typescript": "5.8.3",
|
||||
"typescript-eslint": "8.34.1",
|
||||
"typescript-eslint": "8.37.0",
|
||||
"unplugin-swc": "1.5.5",
|
||||
"vite": "7.0.3",
|
||||
"vite": "7.0.4",
|
||||
"vite-plugin-node": "7.0.0",
|
||||
"vite-tsconfig-paths": "5.1.4",
|
||||
"vitest": "3.2.4",
|
||||
"zx": "8.5.5"
|
||||
"zx": "8.7.1"
|
||||
},
|
||||
"overrides": {
|
||||
"eslint": {
|
||||
@@ -228,5 +228,5 @@
|
||||
}
|
||||
},
|
||||
"private": true,
|
||||
"packageManager": "pnpm@10.12.4"
|
||||
"packageManager": "pnpm@10.13.1"
|
||||
}
|
||||
|
||||
@@ -17,7 +17,6 @@ exports[`Returns paths 1`] = `
|
||||
"myservers-base",
|
||||
"myservers-config",
|
||||
"myservers-config-states",
|
||||
"myservers-env",
|
||||
"myservers-keepalive",
|
||||
"keyfile-base",
|
||||
"machine-id",
|
||||
|
||||
@@ -24,7 +24,6 @@ test('Returns paths', async () => {
|
||||
'myservers-base': '/boot/config/plugins/dynamix.my.servers/',
|
||||
'myservers-config': expect.stringContaining('api/dev/Unraid.net/myservers.cfg'),
|
||||
'myservers-config-states': expect.stringContaining('api/dev/states/myservers.cfg'),
|
||||
'myservers-env': '/boot/config/plugins/dynamix.my.servers/env',
|
||||
'myservers-keepalive': './dev/Unraid.net/fb_keepalive',
|
||||
'keyfile-base': expect.stringContaining('api/dev/Unraid.net'),
|
||||
'machine-id': expect.stringContaining('api/dev/data/machine-id'),
|
||||
|
||||
@@ -124,7 +124,15 @@ export const parseConfig = <T extends Record<string, any>>(
|
||||
throw new AppError('Invalid Parameters Passed to ParseConfig');
|
||||
}
|
||||
|
||||
const data: Record<string, any> = parseIni(fileContents);
|
||||
let data: Record<string, any>;
|
||||
try {
|
||||
data = parseIni(fileContents);
|
||||
} catch (error) {
|
||||
throw new AppError(
|
||||
`Failed to parse config file: ${error instanceof Error ? error.message : String(error)}`
|
||||
);
|
||||
}
|
||||
|
||||
// Remove quotes around keys
|
||||
const dataWithoutQuoteKeys = Object.fromEntries(
|
||||
Object.entries(data).map(([key, value]) => [key.replace(/^"(.+(?="$))"$/, '$1'), value])
|
||||
|
||||
@@ -67,6 +67,7 @@ export const getPackageJsonDependencies = (): string[] | undefined => {
|
||||
|
||||
export const API_VERSION = process.env.npm_package_version ?? getPackageJson().version;
|
||||
|
||||
/** Controls how the app is built/run (i.e. in terms of optimization) */
|
||||
export const NODE_ENV =
|
||||
(process.env.NODE_ENV as 'development' | 'test' | 'staging' | 'production') ?? 'production';
|
||||
export const environment = {
|
||||
@@ -76,6 +77,7 @@ export const CHOKIDAR_USEPOLLING = process.env.CHOKIDAR_USEPOLLING === 'true';
|
||||
export const IS_DOCKER = process.env.IS_DOCKER === 'true';
|
||||
export const DEBUG = process.env.DEBUG === 'true';
|
||||
export const INTROSPECTION = process.env.INTROSPECTION === 'true';
|
||||
/** Determines the app-level & business logic environment (i.e. what data & infrastructure is used) */
|
||||
export const ENVIRONMENT = process.env.ENVIRONMENT
|
||||
? (process.env.ENVIRONMENT as 'production' | 'staging' | 'development')
|
||||
: 'production';
|
||||
|
||||
@@ -49,7 +49,6 @@ const initialState = {
|
||||
resolvePath(process.env.PATHS_STATES ?? ('/usr/local/emhttp/state/' as const)),
|
||||
'myservers.cfg' as const
|
||||
),
|
||||
'myservers-env': '/boot/config/plugins/dynamix.my.servers/env' as const,
|
||||
'myservers-keepalive':
|
||||
process.env.PATHS_MY_SERVERS_FB ??
|
||||
('/boot/config/plugins/dynamix.my.servers/fb_keepalive' as const),
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { writeFileSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
|
||||
import { ensureWriteSync } from '@unraid/shared/util/file.js';
|
||||
import { isEqual } from 'lodash-es';
|
||||
|
||||
import type { RootState } from '@app/store/index.js';
|
||||
@@ -27,8 +27,11 @@ export const startStoreSync = async () => {
|
||||
!isEqual(state, lastState) &&
|
||||
state.paths['myservers-config-states']
|
||||
) {
|
||||
writeFileSync(join(state.paths.states, 'config.log'), JSON.stringify(state.config, null, 2));
|
||||
writeFileSync(
|
||||
ensureWriteSync(
|
||||
join(state.paths.states, 'config.log'),
|
||||
JSON.stringify(state.config, null, 2)
|
||||
);
|
||||
ensureWriteSync(
|
||||
join(state.paths.states, 'graphql.log'),
|
||||
JSON.stringify(state.minigraph, null, 2)
|
||||
);
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
import { copyFile, readFile, writeFile } from 'fs/promises';
|
||||
import { copyFile } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
|
||||
import { Command, CommandRunner, Option } from 'nest-commander';
|
||||
|
||||
import { cliLogger } from '@app/core/log.js';
|
||||
import { fileExistsSync } from '@app/core/utils/files/file-exists.js';
|
||||
import { ENVIRONMENT } from '@app/environment.js';
|
||||
import { getters } from '@app/store/index.js';
|
||||
import { LogService } from '@app/unraid-api/cli/log.service.js';
|
||||
import { StartCommand } from '@app/unraid-api/cli/start.command.js';
|
||||
import { StopCommand } from '@app/unraid-api/cli/stop.command.js';
|
||||
import { RestartCommand } from '@app/unraid-api/cli/restart.command.js';
|
||||
|
||||
interface SwitchEnvOptions {
|
||||
environment?: 'staging' | 'production';
|
||||
@@ -31,60 +31,43 @@ export class SwitchEnvCommand extends CommandRunner {
|
||||
|
||||
constructor(
|
||||
private readonly logger: LogService,
|
||||
private readonly stopCommand: StopCommand,
|
||||
private readonly startCommand: StartCommand
|
||||
private readonly restartCommand: RestartCommand
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
private async getEnvironmentFromFile(path: string): Promise<'production' | 'staging'> {
|
||||
const envFile = await readFile(path, 'utf-8').catch(() => '');
|
||||
this.logger.debug(`Checking ${path} for current ENV, found ${envFile}`);
|
||||
|
||||
// Match the env file env="production" which would be [0] = env="production", [1] = env and [2] = production
|
||||
const matchArray = /([a-zA-Z]+)=["]*([a-zA-Z]+)["]*/.exec(envFile);
|
||||
// Get item from index 2 of the regex match or return production
|
||||
const [, , currentEnvInFile] = matchArray && matchArray.length === 3 ? matchArray : [];
|
||||
return this.parseStringToEnv(currentEnvInFile);
|
||||
}
|
||||
|
||||
private switchToOtherEnv(environment: 'production' | 'staging'): 'production' | 'staging' {
|
||||
if (environment === 'production') {
|
||||
return 'staging';
|
||||
}
|
||||
return 'production';
|
||||
}
|
||||
|
||||
async run(_, options: SwitchEnvOptions): Promise<void> {
|
||||
const paths = getters.paths();
|
||||
const basePath = paths['unraid-api-base'];
|
||||
const envFlashFilePath = paths['myservers-env'];
|
||||
const currentEnvPath = join(basePath, '.env');
|
||||
|
||||
this.logger.warn('Stopping the Unraid API');
|
||||
try {
|
||||
await this.stopCommand.run([], { delete: false });
|
||||
} catch (err) {
|
||||
this.logger.warn('Failed to stop the Unraid API (maybe already stopped?)');
|
||||
// Determine target environment
|
||||
const currentEnv = ENVIRONMENT;
|
||||
const targetEnv = options.environment ?? 'production';
|
||||
|
||||
this.logger.info(`Switching environment from ${currentEnv} to ${targetEnv}`);
|
||||
|
||||
// Check if target environment file exists
|
||||
const sourceEnvPath = join(basePath, `.env.${targetEnv}`);
|
||||
if (!fileExistsSync(sourceEnvPath)) {
|
||||
this.logger.error(
|
||||
`Environment file ${sourceEnvPath} does not exist. Cannot switch to ${targetEnv} environment.`
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const newEnv =
|
||||
options.environment ??
|
||||
this.switchToOtherEnv(await this.getEnvironmentFromFile(envFlashFilePath));
|
||||
this.logger.info(`Setting environment to ${newEnv}`);
|
||||
// Copy the target environment file to .env
|
||||
this.logger.debug(`Copying ${sourceEnvPath} to ${currentEnvPath}`);
|
||||
try {
|
||||
await copyFile(sourceEnvPath, currentEnvPath);
|
||||
this.logger.info(`Successfully switched to ${targetEnv} environment`);
|
||||
} catch (error) {
|
||||
this.logger.error(`Failed to copy environment file: ${error}`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Write new env to flash
|
||||
const newEnvLine = `env="${newEnv}"`;
|
||||
this.logger.debug('Writing %s to %s', newEnvLine, envFlashFilePath);
|
||||
await writeFile(envFlashFilePath, newEnvLine);
|
||||
|
||||
// Copy the new env over to live location before restarting
|
||||
const source = join(basePath, `.env.${newEnv}`);
|
||||
const destination = join(basePath, '.env');
|
||||
|
||||
cliLogger.debug('Copying %s to %s', source, destination);
|
||||
await copyFile(source, destination);
|
||||
|
||||
cliLogger.info('Now using %s', newEnv);
|
||||
await this.startCommand.run([], {});
|
||||
// Restart the API to pick up the new environment
|
||||
this.logger.info('Restarting Unraid API to apply environment changes...');
|
||||
await this.restartCommand.run();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,6 +12,8 @@ import { ConfigPersistenceHelper } from '@app/unraid-api/config/persistence.help
|
||||
|
||||
export { type ApiConfig };
|
||||
|
||||
const logger = new Logger('ApiConfig');
|
||||
|
||||
const createDefaultConfig = (): ApiConfig => ({
|
||||
version: API_VERSION,
|
||||
extraOrigins: [],
|
||||
@@ -33,21 +35,54 @@ export const persistApiConfig = async (config: ApiConfig) => {
|
||||
};
|
||||
|
||||
export const loadApiConfig = async () => {
|
||||
const defaultConfig = createDefaultConfig();
|
||||
const apiConfig = new ApiStateConfig<ApiConfig>(
|
||||
{
|
||||
name: 'api',
|
||||
defaultConfig,
|
||||
parse: (data) => data as ApiConfig,
|
||||
},
|
||||
new ConfigPersistenceHelper()
|
||||
);
|
||||
const diskConfig = await apiConfig.parseConfig();
|
||||
return {
|
||||
...defaultConfig,
|
||||
...diskConfig,
|
||||
version: API_VERSION,
|
||||
};
|
||||
try {
|
||||
const defaultConfig = createDefaultConfig();
|
||||
const apiConfig = new ApiStateConfig<ApiConfig>(
|
||||
{
|
||||
name: 'api',
|
||||
defaultConfig,
|
||||
parse: (data) => data as ApiConfig,
|
||||
},
|
||||
new ConfigPersistenceHelper()
|
||||
);
|
||||
|
||||
let diskConfig: ApiConfig | undefined;
|
||||
try {
|
||||
diskConfig = await apiConfig.parseConfig();
|
||||
} catch (error) {
|
||||
logger.error('Failed to load API config from disk, using defaults:', error);
|
||||
diskConfig = undefined;
|
||||
|
||||
// Try to overwrite the invalid config with defaults to fix the issue
|
||||
try {
|
||||
const configToWrite = {
|
||||
...defaultConfig,
|
||||
version: API_VERSION,
|
||||
};
|
||||
|
||||
const writeSuccess = await apiConfig.persist(configToWrite);
|
||||
if (writeSuccess) {
|
||||
logger.log('Successfully overwrote invalid config file with defaults.');
|
||||
} else {
|
||||
logger.error(
|
||||
'Failed to overwrite invalid config file. Continuing with defaults in memory only.'
|
||||
);
|
||||
}
|
||||
} catch (persistError) {
|
||||
logger.error('Error during config file repair:', persistError);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
...defaultConfig,
|
||||
...diskConfig,
|
||||
version: API_VERSION,
|
||||
};
|
||||
} catch (outerError) {
|
||||
// This should never happen, but ensures the config factory never throws
|
||||
logger.error('Critical error in loadApiConfig, using minimal defaults:', outerError);
|
||||
return createDefaultConfig();
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -81,21 +116,29 @@ export class ApiConfigPersistence {
|
||||
}
|
||||
|
||||
async onModuleInit() {
|
||||
if (!(await fileExists(this.filePath))) {
|
||||
this.migrateFromMyServersConfig();
|
||||
try {
|
||||
if (!(await fileExists(this.filePath))) {
|
||||
this.migrateFromMyServersConfig();
|
||||
}
|
||||
await this.persistenceHelper.persistIfChanged(this.filePath, this.config);
|
||||
this.configService.changes$.pipe(bufferTime(25)).subscribe({
|
||||
next: async (changes) => {
|
||||
if (changes.some((change) => change.path.startsWith('api'))) {
|
||||
this.logger.verbose(`API Config changed ${JSON.stringify(changes)}`);
|
||||
try {
|
||||
await this.persistenceHelper.persistIfChanged(this.filePath, this.config);
|
||||
} catch (persistError) {
|
||||
this.logger.error('Error persisting config changes:', persistError);
|
||||
}
|
||||
}
|
||||
},
|
||||
error: (err) => {
|
||||
this.logger.error('Error receiving config changes:', err);
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Error during API config module initialization:', error);
|
||||
}
|
||||
await this.persistenceHelper.persistIfChanged(this.filePath, this.config);
|
||||
this.configService.changes$.pipe(bufferTime(25)).subscribe({
|
||||
next: async (changes) => {
|
||||
if (changes.some((change) => change.path.startsWith('api'))) {
|
||||
this.logger.verbose(`API Config changed ${JSON.stringify(changes)}`);
|
||||
await this.persistenceHelper.persistIfChanged(this.filePath, this.config);
|
||||
}
|
||||
},
|
||||
error: (err) => {
|
||||
this.logger.error('Error receiving config changes:', err);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
convertLegacyConfig(
|
||||
|
||||
@@ -2,9 +2,26 @@ import { ConfigService } from '@nestjs/config';
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ApiConfigPersistence } from '@app/unraid-api/config/api-config.module.js';
|
||||
import { fileExists } from '@app/core/utils/files/file-exists.js';
|
||||
import { ApiConfigPersistence, loadApiConfig } from '@app/unraid-api/config/api-config.module.js';
|
||||
import { ConfigPersistenceHelper } from '@app/unraid-api/config/persistence.helper.js';
|
||||
|
||||
// Mock the core file-exists utility used by ApiStateConfig
|
||||
vi.mock('@app/core/utils/files/file-exists.js', () => ({
|
||||
fileExists: vi.fn(),
|
||||
}));
|
||||
|
||||
// Mock the shared file-exists utility used by ConfigPersistenceHelper
|
||||
vi.mock('@unraid/shared/util/file.js', () => ({
|
||||
fileExists: vi.fn(),
|
||||
}));
|
||||
|
||||
// Mock fs/promises for file I/O operations
|
||||
vi.mock('fs/promises', () => ({
|
||||
readFile: vi.fn(),
|
||||
writeFile: vi.fn(),
|
||||
}));
|
||||
|
||||
describe('ApiConfigPersistence', () => {
|
||||
let service: ApiConfigPersistence;
|
||||
let configService: ConfigService;
|
||||
@@ -135,3 +152,127 @@ describe('ApiConfigPersistence', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadApiConfig', () => {
|
||||
let readFile: any;
|
||||
let writeFile: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
// Reset modules to ensure fresh imports
|
||||
vi.resetModules();
|
||||
|
||||
// Get mocked functions
|
||||
const fsMocks = await import('fs/promises');
|
||||
readFile = fsMocks.readFile;
|
||||
writeFile = fsMocks.writeFile;
|
||||
});
|
||||
|
||||
it('should return default config when file does not exist', async () => {
|
||||
vi.mocked(fileExists).mockResolvedValue(false);
|
||||
|
||||
const result = await loadApiConfig();
|
||||
|
||||
expect(result).toEqual({
|
||||
version: expect.any(String),
|
||||
extraOrigins: [],
|
||||
sandbox: false,
|
||||
ssoSubIds: [],
|
||||
plugins: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should merge disk config with defaults when file exists', async () => {
|
||||
const diskConfig = {
|
||||
extraOrigins: ['https://example.com'],
|
||||
sandbox: true,
|
||||
ssoSubIds: ['sub1', 'sub2'],
|
||||
};
|
||||
|
||||
vi.mocked(fileExists).mockResolvedValue(true);
|
||||
vi.mocked(readFile).mockResolvedValue(JSON.stringify(diskConfig));
|
||||
|
||||
const result = await loadApiConfig();
|
||||
|
||||
expect(result).toEqual({
|
||||
version: expect.any(String),
|
||||
extraOrigins: ['https://example.com'],
|
||||
sandbox: true,
|
||||
ssoSubIds: ['sub1', 'sub2'],
|
||||
plugins: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should use default config and overwrite file when JSON parsing fails', async () => {
|
||||
const { fileExists: sharedFileExists } = await import('@unraid/shared/util/file.js');
|
||||
|
||||
vi.mocked(fileExists).mockResolvedValue(true);
|
||||
vi.mocked(readFile).mockResolvedValue('{ invalid json }');
|
||||
vi.mocked(sharedFileExists).mockResolvedValue(false); // For persist operation
|
||||
vi.mocked(writeFile).mockResolvedValue(undefined);
|
||||
|
||||
const result = await loadApiConfig();
|
||||
|
||||
// Error logging is handled by NestJS Logger, just verify the config is returned
|
||||
expect(writeFile).toHaveBeenCalled();
|
||||
expect(result).toEqual({
|
||||
version: expect.any(String),
|
||||
extraOrigins: [],
|
||||
sandbox: false,
|
||||
ssoSubIds: [],
|
||||
plugins: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle write failure gracefully when JSON parsing fails', async () => {
|
||||
const { fileExists: sharedFileExists } = await import('@unraid/shared/util/file.js');
|
||||
|
||||
vi.mocked(fileExists).mockResolvedValue(true);
|
||||
vi.mocked(readFile).mockResolvedValue('{ invalid json }');
|
||||
vi.mocked(sharedFileExists).mockResolvedValue(false); // For persist operation
|
||||
vi.mocked(writeFile).mockRejectedValue(new Error('Permission denied'));
|
||||
|
||||
const result = await loadApiConfig();
|
||||
|
||||
// Error logging is handled by NestJS Logger, just verify the config is returned
|
||||
expect(writeFile).toHaveBeenCalled();
|
||||
expect(result).toEqual({
|
||||
version: expect.any(String),
|
||||
extraOrigins: [],
|
||||
sandbox: false,
|
||||
ssoSubIds: [],
|
||||
plugins: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should use default config when file is empty', async () => {
|
||||
vi.mocked(fileExists).mockResolvedValue(true);
|
||||
vi.mocked(readFile).mockResolvedValue('');
|
||||
|
||||
const result = await loadApiConfig();
|
||||
|
||||
// No error logging expected for empty files
|
||||
expect(result).toEqual({
|
||||
version: expect.any(String),
|
||||
extraOrigins: [],
|
||||
sandbox: false,
|
||||
ssoSubIds: [],
|
||||
plugins: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should always override version with current API_VERSION', async () => {
|
||||
const diskConfig = {
|
||||
version: 'old-version',
|
||||
extraOrigins: ['https://example.com'],
|
||||
};
|
||||
|
||||
vi.mocked(fileExists).mockResolvedValue(true);
|
||||
vi.mocked(readFile).mockResolvedValue(JSON.stringify(diskConfig));
|
||||
|
||||
const result = await loadApiConfig();
|
||||
|
||||
expect(result.version).not.toBe('old-version');
|
||||
expect(result.version).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
364
api/src/unraid-api/config/factory/api-state.model.test.ts
Normal file
364
api/src/unraid-api/config/factory/api-state.model.test.ts
Normal file
@@ -0,0 +1,364 @@
|
||||
import { Logger } from '@nestjs/common';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import { join } from 'path';
|
||||
|
||||
import type { Mock } from 'vitest';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { fileExists } from '@app/core/utils/files/file-exists.js';
|
||||
import { ApiStateConfig } from '@app/unraid-api/config/factory/api-state.model.js';
|
||||
import { ConfigPersistenceHelper } from '@app/unraid-api/config/persistence.helper.js';
|
||||
|
||||
vi.mock('node:fs/promises');
|
||||
vi.mock('@app/core/utils/files/file-exists.js');
|
||||
vi.mock('@app/environment.js', () => ({
|
||||
PATHS_CONFIG_MODULES: '/test/config/path',
|
||||
}));
|
||||
|
||||
describe('ApiStateConfig', () => {
|
||||
let mockPersistenceHelper: ConfigPersistenceHelper;
|
||||
let mockLogger: Logger;
|
||||
|
||||
interface TestConfig {
|
||||
name: string;
|
||||
value: number;
|
||||
enabled: boolean;
|
||||
}
|
||||
|
||||
const defaultConfig: TestConfig = {
|
||||
name: 'test',
|
||||
value: 42,
|
||||
enabled: true,
|
||||
};
|
||||
|
||||
const parseFunction = (data: unknown): TestConfig => {
|
||||
if (!data || typeof data !== 'object') {
|
||||
throw new Error('Invalid config format');
|
||||
}
|
||||
return data as TestConfig;
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
mockPersistenceHelper = {
|
||||
persistIfChanged: vi.fn().mockResolvedValue(true),
|
||||
} as any;
|
||||
|
||||
mockLogger = {
|
||||
log: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
} as any;
|
||||
|
||||
vi.spyOn(Logger.prototype, 'log').mockImplementation(mockLogger.log);
|
||||
vi.spyOn(Logger.prototype, 'warn').mockImplementation(mockLogger.warn);
|
||||
vi.spyOn(Logger.prototype, 'error').mockImplementation(mockLogger.error);
|
||||
vi.spyOn(Logger.prototype, 'debug').mockImplementation(mockLogger.debug);
|
||||
});
|
||||
|
||||
describe('constructor', () => {
|
||||
it('should initialize with cloned default config', () => {
|
||||
const config = new ApiStateConfig(
|
||||
{
|
||||
name: 'test-config',
|
||||
defaultConfig,
|
||||
parse: parseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
|
||||
expect(config.config).toEqual(defaultConfig);
|
||||
expect(config.config).not.toBe(defaultConfig);
|
||||
});
|
||||
});
|
||||
|
||||
describe('token', () => {
|
||||
it('should generate correct token', () => {
|
||||
const config = new ApiStateConfig(
|
||||
{
|
||||
name: 'my-config',
|
||||
defaultConfig,
|
||||
parse: parseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
|
||||
expect(config.token).toBe('ApiConfig.my-config');
|
||||
});
|
||||
});
|
||||
|
||||
describe('file paths', () => {
|
||||
it('should generate correct file name', () => {
|
||||
const config = new ApiStateConfig(
|
||||
{
|
||||
name: 'test-config',
|
||||
defaultConfig,
|
||||
parse: parseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
|
||||
expect(config.fileName).toBe('test-config.json');
|
||||
});
|
||||
|
||||
it('should generate correct file path', () => {
|
||||
const config = new ApiStateConfig(
|
||||
{
|
||||
name: 'test-config',
|
||||
defaultConfig,
|
||||
parse: parseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
|
||||
expect(config.filePath).toBe(join('/test/config/path', 'test-config.json'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseConfig', () => {
|
||||
let config: ApiStateConfig<TestConfig>;
|
||||
|
||||
beforeEach(() => {
|
||||
config = new ApiStateConfig(
|
||||
{
|
||||
name: 'test-config',
|
||||
defaultConfig,
|
||||
parse: parseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
});
|
||||
|
||||
it('should return undefined when file does not exist', async () => {
|
||||
(fileExists as Mock).mockResolvedValue(false);
|
||||
|
||||
const result = await config.parseConfig();
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
expect(readFile).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should parse valid JSON config', async () => {
|
||||
const validConfig = { name: 'custom', value: 100, enabled: false };
|
||||
(fileExists as Mock).mockResolvedValue(true);
|
||||
(readFile as Mock).mockResolvedValue(JSON.stringify(validConfig));
|
||||
|
||||
const result = await config.parseConfig();
|
||||
|
||||
expect(result).toEqual(validConfig);
|
||||
expect(readFile).toHaveBeenCalledWith(config.filePath, 'utf8');
|
||||
});
|
||||
|
||||
it('should return undefined for empty file', async () => {
|
||||
(fileExists as Mock).mockResolvedValue(true);
|
||||
(readFile as Mock).mockResolvedValue('');
|
||||
|
||||
const result = await config.parseConfig();
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(expect.stringContaining('is empty'));
|
||||
});
|
||||
|
||||
it('should return undefined for whitespace-only file', async () => {
|
||||
(fileExists as Mock).mockResolvedValue(true);
|
||||
(readFile as Mock).mockResolvedValue(' \n\t ');
|
||||
|
||||
const result = await config.parseConfig();
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(expect.stringContaining('is empty'));
|
||||
});
|
||||
|
||||
it('should throw error for invalid JSON', async () => {
|
||||
(fileExists as Mock).mockResolvedValue(true);
|
||||
(readFile as Mock).mockResolvedValue('{ invalid json }');
|
||||
|
||||
await expect(config.parseConfig()).rejects.toThrow();
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Failed to parse JSON')
|
||||
);
|
||||
expect(mockLogger.debug).toHaveBeenCalledWith(expect.stringContaining('{ invalid json }'));
|
||||
});
|
||||
|
||||
it('should throw error for incomplete JSON', async () => {
|
||||
(fileExists as Mock).mockResolvedValue(true);
|
||||
(readFile as Mock).mockResolvedValue('{ "name": "test"');
|
||||
|
||||
await expect(config.parseConfig()).rejects.toThrow();
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Failed to parse JSON')
|
||||
);
|
||||
});
|
||||
|
||||
it('should use custom file path when provided', async () => {
|
||||
const customPath = '/custom/path/config.json';
|
||||
(fileExists as Mock).mockResolvedValue(true);
|
||||
(readFile as Mock).mockResolvedValue(JSON.stringify(defaultConfig));
|
||||
|
||||
await config.parseConfig({ filePath: customPath });
|
||||
|
||||
expect(fileExists).toHaveBeenCalledWith(customPath);
|
||||
expect(readFile).toHaveBeenCalledWith(customPath, 'utf8');
|
||||
});
|
||||
});
|
||||
|
||||
describe('persist', () => {
|
||||
let config: ApiStateConfig<TestConfig>;
|
||||
|
||||
beforeEach(() => {
|
||||
config = new ApiStateConfig(
|
||||
{
|
||||
name: 'test-config',
|
||||
defaultConfig,
|
||||
parse: parseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
});
|
||||
|
||||
it('should persist current config when no argument provided', async () => {
|
||||
const result = await config.persist();
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockPersistenceHelper.persistIfChanged).toHaveBeenCalledWith(
|
||||
config.filePath,
|
||||
defaultConfig
|
||||
);
|
||||
});
|
||||
|
||||
it('should persist provided config', async () => {
|
||||
const customConfig = { name: 'custom', value: 999, enabled: false };
|
||||
|
||||
const result = await config.persist(customConfig);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockPersistenceHelper.persistIfChanged).toHaveBeenCalledWith(
|
||||
config.filePath,
|
||||
customConfig
|
||||
);
|
||||
});
|
||||
|
||||
it('should return false and log error on persistence failure', async () => {
|
||||
(mockPersistenceHelper.persistIfChanged as Mock).mockResolvedValue(false);
|
||||
|
||||
const result = await config.persist();
|
||||
|
||||
expect(result).toBe(false);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Could not write config')
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('load', () => {
|
||||
let config: ApiStateConfig<TestConfig>;
|
||||
|
||||
beforeEach(() => {
|
||||
config = new ApiStateConfig(
|
||||
{
|
||||
name: 'test-config',
|
||||
defaultConfig,
|
||||
parse: parseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
});
|
||||
|
||||
it('should load config from file when it exists', async () => {
|
||||
const savedConfig = { name: 'saved', value: 200, enabled: true };
|
||||
(fileExists as Mock).mockResolvedValue(true);
|
||||
(readFile as Mock).mockResolvedValue(JSON.stringify(savedConfig));
|
||||
|
||||
await config.load();
|
||||
|
||||
expect(config.config).toEqual(savedConfig);
|
||||
});
|
||||
|
||||
it('should create default config when file does not exist', async () => {
|
||||
(fileExists as Mock).mockResolvedValue(false);
|
||||
|
||||
await config.load();
|
||||
|
||||
expect(config.config).toEqual(defaultConfig);
|
||||
expect(mockLogger.log).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Config file does not exist')
|
||||
);
|
||||
expect(mockPersistenceHelper.persistIfChanged).toHaveBeenCalledWith(
|
||||
config.filePath,
|
||||
defaultConfig
|
||||
);
|
||||
});
|
||||
|
||||
it('should not modify config when file is invalid', async () => {
|
||||
(fileExists as Mock).mockResolvedValue(true);
|
||||
(readFile as Mock).mockResolvedValue('invalid json');
|
||||
|
||||
await config.load();
|
||||
|
||||
expect(config.config).toEqual(defaultConfig);
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
expect.any(Error),
|
||||
expect.stringContaining('is invalid')
|
||||
);
|
||||
});
|
||||
|
||||
it('should not throw even when persist fails', async () => {
|
||||
(fileExists as Mock).mockResolvedValue(false);
|
||||
(mockPersistenceHelper.persistIfChanged as Mock).mockResolvedValue(false);
|
||||
|
||||
await expect(config.load()).resolves.not.toThrow();
|
||||
|
||||
expect(config.config).toEqual(defaultConfig);
|
||||
});
|
||||
});
|
||||
|
||||
describe('update', () => {
|
||||
let config: ApiStateConfig<TestConfig>;
|
||||
|
||||
beforeEach(() => {
|
||||
config = new ApiStateConfig(
|
||||
{
|
||||
name: 'test-config',
|
||||
defaultConfig,
|
||||
parse: parseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
});
|
||||
|
||||
it('should update config with partial values', () => {
|
||||
config.update({ value: 123 });
|
||||
|
||||
expect(config.config).toEqual({
|
||||
name: 'test',
|
||||
value: 123,
|
||||
enabled: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('should return self for chaining', () => {
|
||||
const result = config.update({ enabled: false });
|
||||
|
||||
expect(result).toBe(config);
|
||||
});
|
||||
|
||||
it('should validate updated config through parse function', () => {
|
||||
const badParseFunction = vi.fn().mockImplementation(() => {
|
||||
throw new Error('Validation failed');
|
||||
});
|
||||
|
||||
const strictConfig = new ApiStateConfig(
|
||||
{
|
||||
name: 'strict-config',
|
||||
defaultConfig,
|
||||
parse: badParseFunction,
|
||||
},
|
||||
mockPersistenceHelper
|
||||
);
|
||||
|
||||
expect(() => strictConfig.update({ value: -1 })).toThrow('Validation failed');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -56,13 +56,11 @@ export class ApiStateConfig<T> {
|
||||
* @returns True if the config was written successfully, false otherwise.
|
||||
*/
|
||||
async persist(config = this.#config) {
|
||||
try {
|
||||
await this.persistenceHelper.persistIfChanged(this.filePath, config);
|
||||
return true;
|
||||
} catch (error) {
|
||||
this.logger.error(error, `Could not write config to ${this.filePath}.`);
|
||||
return false;
|
||||
const success = await this.persistenceHelper.persistIfChanged(this.filePath, config);
|
||||
if (!success) {
|
||||
this.logger.error(`Could not write config to ${this.filePath}.`);
|
||||
}
|
||||
return success;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -76,8 +74,23 @@ export class ApiStateConfig<T> {
|
||||
const { filePath = this.filePath } = opts;
|
||||
if (!(await fileExists(filePath))) return undefined;
|
||||
|
||||
const rawConfig = JSON.parse(await readFile(filePath, 'utf8'));
|
||||
return this.options.parse(rawConfig);
|
||||
const fileContent = await readFile(filePath, 'utf8');
|
||||
|
||||
if (!fileContent || fileContent.trim() === '') {
|
||||
this.logger.warn(`Config file '${filePath}' is empty.`);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const rawConfig = JSON.parse(fileContent);
|
||||
return this.options.parse(rawConfig);
|
||||
} catch (error) {
|
||||
this.logger.error(
|
||||
`Failed to parse JSON from '${filePath}': ${error instanceof Error ? error.message : String(error)}`
|
||||
);
|
||||
this.logger.debug(`File content: ${fileContent.substring(0, 100)}...`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -12,24 +12,59 @@ export class ConfigPersistenceHelper {
|
||||
*
|
||||
* @param filePath - The path to the config file.
|
||||
* @param data - The data to persist.
|
||||
* @returns `true` if the config was persisted, `false` otherwise.
|
||||
* @returns `true` if the config was persisted, `false` if no changes were needed or if persistence failed.
|
||||
*
|
||||
* @throws {Error} if the config file does not exist or is unreadable.
|
||||
* @throws {Error} if the config file is not valid JSON.
|
||||
* @throws {Error} if given data is not JSON (de)serializable.
|
||||
* @throws {Error} if the config file is not writable.
|
||||
* This method is designed to never throw errors. If the existing file is corrupted or unreadable,
|
||||
* it will attempt to overwrite it with the new data. If write operations fail, it returns false
|
||||
* but does not crash the application.
|
||||
*/
|
||||
async persistIfChanged(filePath: string, data: unknown): Promise<boolean> {
|
||||
if (!(await fileExists(filePath))) {
|
||||
await writeFile(filePath, JSON.stringify(data ?? {}, null, 2));
|
||||
return true;
|
||||
try {
|
||||
const jsonString = JSON.stringify(data ?? {}, null, 2);
|
||||
await writeFile(filePath, jsonString);
|
||||
return true;
|
||||
} catch (error) {
|
||||
// JSON serialization or write failed, but don't crash - just return false
|
||||
return false;
|
||||
}
|
||||
}
|
||||
const currentData = JSON.parse(await readFile(filePath, 'utf8'));
|
||||
const stagedData = JSON.parse(JSON.stringify(data));
|
||||
|
||||
let currentData: unknown;
|
||||
try {
|
||||
const fileContent = await readFile(filePath, 'utf8');
|
||||
currentData = JSON.parse(fileContent);
|
||||
} catch (error) {
|
||||
// If existing file is corrupted, treat it as if it doesn't exist
|
||||
// and write the new data
|
||||
try {
|
||||
const jsonString = JSON.stringify(data ?? {}, null, 2);
|
||||
await writeFile(filePath, jsonString);
|
||||
return true;
|
||||
} catch (writeError) {
|
||||
// JSON serialization or write failed, but don't crash - just return false
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
let stagedData: unknown;
|
||||
try {
|
||||
stagedData = JSON.parse(JSON.stringify(data));
|
||||
} catch (error) {
|
||||
// If data can't be serialized to JSON, we can't persist it
|
||||
return false;
|
||||
}
|
||||
|
||||
if (isEqual(currentData, stagedData)) {
|
||||
return false;
|
||||
}
|
||||
await writeFile(filePath, JSON.stringify(stagedData, null, 2));
|
||||
return true;
|
||||
|
||||
try {
|
||||
await writeFile(filePath, JSON.stringify(stagedData, null, 2));
|
||||
return true;
|
||||
} catch (error) {
|
||||
// Write failed, but don't crash - just return false
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ export class NginxService {
|
||||
async reload() {
|
||||
try {
|
||||
await execa('/etc/rc.d/rc.nginx', ['reload']);
|
||||
this.logger.log('Nginx reloaded');
|
||||
return true;
|
||||
} catch (err: unknown) {
|
||||
this.logger.warn('Failed to reload Nginx with error: ', err);
|
||||
|
||||
@@ -65,7 +65,16 @@ export class PluginService {
|
||||
* @returns A tuple of the plugin name and version.
|
||||
*/
|
||||
static async listPlugins(): Promise<[string, string][]> {
|
||||
const { plugins = [] } = await loadApiConfig();
|
||||
let plugins: string[] = [];
|
||||
try {
|
||||
const config = await loadApiConfig();
|
||||
plugins = config.plugins || [];
|
||||
} catch (error) {
|
||||
PluginService.logger.error(
|
||||
'Failed to load API config for plugin discovery, using empty list:',
|
||||
error
|
||||
);
|
||||
}
|
||||
const pluginNames = new Set(
|
||||
plugins.map((plugin) => {
|
||||
const { name } = parsePackageArg(plugin);
|
||||
|
||||
@@ -1,14 +1,18 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
|
||||
import { ONE_SECOND_MS } from '@app/consts.js';
|
||||
import { NginxService } from '@app/unraid-api/nginx/nginx.service.js';
|
||||
import { ModificationEffect } from '@app/unraid-api/unraid-file-modifier/file-modification.js';
|
||||
|
||||
@Injectable()
|
||||
export class FileModificationEffectService {
|
||||
private readonly logger = new Logger(FileModificationEffectService.name);
|
||||
constructor(private readonly nginxService: NginxService) {}
|
||||
async runEffect(effect: ModificationEffect): Promise<void> {
|
||||
switch (effect) {
|
||||
case 'nginx:reload':
|
||||
this.logger.log('Reloading Nginx in 10 seconds...');
|
||||
await new Promise((resolve) => setTimeout(resolve, 10 * ONE_SECOND_MS));
|
||||
await this.nginxService.reload();
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -1 +1 @@
|
||||
1751630630443
|
||||
1752524464371
|
||||
@@ -1 +1 @@
|
||||
1751630630198
|
||||
1752524464066
|
||||
@@ -1 +1 @@
|
||||
1751630630343
|
||||
1752524464213
|
||||
@@ -1 +1 @@
|
||||
1751630630571
|
||||
1752524464631
|
||||
@@ -1 +1 @@
|
||||
1751630630810
|
||||
1752524464761
|
||||
@@ -65,6 +65,13 @@ if (is_localhost() && !is_good_session()) {
|
||||
return this.prependDoctypeWithPhp(source, newPhpCode);
|
||||
}
|
||||
|
||||
private addModalsWebComponent(source: string): string {
|
||||
if (source.includes('<unraid-modals>')) {
|
||||
return source;
|
||||
}
|
||||
return source.replace('<body>', '<body>\n<unraid-modals></unraid-modals>');
|
||||
}
|
||||
|
||||
private hideHeaderLogo(source: string): string {
|
||||
return source.replace(
|
||||
'<a href="https://unraid.net" target="_blank"><?readfile("$docroot/webGui/images/UN-logotype-gradient.svg")?></a>',
|
||||
@@ -72,17 +79,14 @@ if (is_localhost() && !is_good_session()) {
|
||||
);
|
||||
}
|
||||
|
||||
private addModalsWebComponent(source: string): string {
|
||||
return source.replace('<body>', '<body>\n<unraid-modals></unraid-modals>');
|
||||
}
|
||||
private applyToSource(fileContent: string): string {
|
||||
const transformers = [
|
||||
this.removeNotificationBell.bind(this),
|
||||
this.replaceToasts.bind(this),
|
||||
this.addToaster.bind(this),
|
||||
this.patchGuiBootAuth.bind(this),
|
||||
this.hideHeaderLogo.bind(this),
|
||||
this.addModalsWebComponent.bind(this),
|
||||
this.hideHeaderLogo.bind(this),
|
||||
];
|
||||
|
||||
return transformers.reduce((content, transformer) => transformer(content), fileContent);
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
{
|
||||
"name": "unraid-monorepo",
|
||||
"private": true,
|
||||
"version": "4.9.0",
|
||||
"version": "4.10.0",
|
||||
"scripts": {
|
||||
"preinstall": "node web/scripts/check-node-version.js",
|
||||
"build": "pnpm -r build",
|
||||
"build:watch": " pnpm -r --parallel build:watch",
|
||||
"dev": "pnpm -r dev",
|
||||
@@ -57,5 +58,5 @@
|
||||
"pnpm lint:fix"
|
||||
]
|
||||
},
|
||||
"packageManager": "pnpm@10.12.4"
|
||||
"packageManager": "pnpm@10.13.1"
|
||||
}
|
||||
|
||||
@@ -25,10 +25,10 @@
|
||||
"description": "Unraid Connect plugin for Unraid API",
|
||||
"devDependencies": {
|
||||
"@apollo/client": "3.13.8",
|
||||
"@faker-js/faker": "9.8.0",
|
||||
"@faker-js/faker": "9.9.0",
|
||||
"@graphql-codegen/cli": "5.0.7",
|
||||
"@graphql-typed-document-node/core": "3.2.0",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.4.2",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.5.1",
|
||||
"@jsonforms/core": "3.6.0",
|
||||
"@nestjs/apollo": "13.1.0",
|
||||
"@nestjs/common": "11.1.3",
|
||||
@@ -41,29 +41,29 @@
|
||||
"@types/ini": "4.1.1",
|
||||
"@types/ip": "1.1.3",
|
||||
"@types/lodash-es": "4.17.12",
|
||||
"@types/node": "22.15.32",
|
||||
"@types/node": "22.16.4",
|
||||
"@types/ws": "8.18.1",
|
||||
"camelcase-keys": "9.1.3",
|
||||
"class-transformer": "0.5.1",
|
||||
"class-validator": "0.14.2",
|
||||
"execa": "9.6.0",
|
||||
"fast-check": "4.1.1",
|
||||
"fast-check": "4.2.0",
|
||||
"got": "14.4.7",
|
||||
"graphql": "16.11.0",
|
||||
"graphql-scalars": "1.24.2",
|
||||
"graphql-subscriptions": "3.0.0",
|
||||
"graphql-ws": "6.0.5",
|
||||
"graphql-ws": "6.0.6",
|
||||
"ini": "5.0.0",
|
||||
"jose": "6.0.11",
|
||||
"lodash-es": "4.17.21",
|
||||
"nest-authz": "2.17.0",
|
||||
"prettier": "3.5.3",
|
||||
"prettier": "3.6.2",
|
||||
"rimraf": "6.0.1",
|
||||
"rxjs": "7.8.2",
|
||||
"type-fest": "4.41.0",
|
||||
"typescript": "5.8.3",
|
||||
"vitest": "3.2.4",
|
||||
"ws": "8.18.2",
|
||||
"ws": "8.18.3",
|
||||
"zen-observable-ts": "1.1.0"
|
||||
},
|
||||
"dependencies": {
|
||||
@@ -91,13 +91,13 @@
|
||||
"graphql": "16.11.0",
|
||||
"graphql-scalars": "1.24.2",
|
||||
"graphql-subscriptions": "3.0.0",
|
||||
"graphql-ws": "6.0.5",
|
||||
"graphql-ws": "6.0.6",
|
||||
"ini": "5.0.0",
|
||||
"jose": "6.0.11",
|
||||
"lodash-es": "4.17.21",
|
||||
"nest-authz": "2.17.0",
|
||||
"rxjs": "7.8.2",
|
||||
"ws": "^8.18.0",
|
||||
"ws": "8.18.3",
|
||||
"zen-observable-ts": "1.1.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,269 @@
|
||||
import { EventEmitter2 } from '@nestjs/event-emitter';
|
||||
|
||||
import { PubSub } from 'graphql-subscriptions';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { MinigraphStatus } from '../config/connect.config.js';
|
||||
import { EVENTS, GRAPHQL_PUBSUB_CHANNEL } from '../helper/nest-tokens.js';
|
||||
import { MothershipConnectionService } from '../mothership-proxy/connection.service.js';
|
||||
import { MothershipController } from '../mothership-proxy/mothership.controller.js';
|
||||
import { MothershipHandler } from '../mothership-proxy/mothership.events.js';
|
||||
|
||||
describe('MothershipHandler - Behavioral Tests', () => {
|
||||
let handler: MothershipHandler;
|
||||
let connectionService: MothershipConnectionService;
|
||||
let mothershipController: MothershipController;
|
||||
let pubSub: PubSub;
|
||||
let eventEmitter: EventEmitter2;
|
||||
|
||||
// Track actual state changes and effects
|
||||
let connectionAttempts: Array<{ timestamp: number; reason: string }> = [];
|
||||
let publishedMessages: Array<{ channel: string; data: any }> = [];
|
||||
let controllerStops: Array<{ timestamp: number; reason?: string }> = [];
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset tracking arrays
|
||||
connectionAttempts = [];
|
||||
publishedMessages = [];
|
||||
controllerStops = [];
|
||||
|
||||
// Create real event emitter for integration testing
|
||||
eventEmitter = new EventEmitter2();
|
||||
|
||||
// Mock connection service with realistic behavior
|
||||
connectionService = {
|
||||
getIdentityState: vi.fn(),
|
||||
getConnectionState: vi.fn(),
|
||||
} as any;
|
||||
|
||||
// Mock controller that tracks behavior instead of just method calls
|
||||
mothershipController = {
|
||||
initOrRestart: vi.fn().mockImplementation(() => {
|
||||
connectionAttempts.push({
|
||||
timestamp: Date.now(),
|
||||
reason: 'initOrRestart called',
|
||||
});
|
||||
return Promise.resolve();
|
||||
}),
|
||||
stop: vi.fn().mockImplementation(() => {
|
||||
controllerStops.push({
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
return Promise.resolve();
|
||||
}),
|
||||
} as any;
|
||||
|
||||
// Mock PubSub that tracks published messages
|
||||
pubSub = {
|
||||
publish: vi.fn().mockImplementation((channel: string, data: any) => {
|
||||
publishedMessages.push({ channel, data });
|
||||
return Promise.resolve();
|
||||
}),
|
||||
} as any;
|
||||
|
||||
handler = new MothershipHandler(connectionService, mothershipController, pubSub);
|
||||
});
|
||||
|
||||
describe('Connection Recovery Behavior', () => {
|
||||
it('should attempt reconnection when ping fails', async () => {
|
||||
// Given: Connection is in ping failure state
|
||||
vi.mocked(connectionService.getConnectionState).mockReturnValue({
|
||||
status: MinigraphStatus.PING_FAILURE,
|
||||
error: 'Ping timeout after 3 minutes',
|
||||
});
|
||||
|
||||
// When: Connection status change event occurs
|
||||
await handler.onMothershipConnectionStatusChanged();
|
||||
|
||||
// Then: System should attempt to recover the connection
|
||||
expect(connectionAttempts).toHaveLength(1);
|
||||
expect(connectionAttempts[0].reason).toBe('initOrRestart called');
|
||||
});
|
||||
|
||||
it('should NOT interfere with exponential backoff during error retry state', async () => {
|
||||
// Given: Connection is in error retry state (GraphQL client managing backoff)
|
||||
vi.mocked(connectionService.getConnectionState).mockReturnValue({
|
||||
status: MinigraphStatus.ERROR_RETRYING,
|
||||
error: 'Network error',
|
||||
timeout: 20000,
|
||||
timeoutStart: Date.now(),
|
||||
});
|
||||
|
||||
// When: Connection status change event occurs
|
||||
await handler.onMothershipConnectionStatusChanged();
|
||||
|
||||
// Then: System should NOT interfere with ongoing retry logic
|
||||
expect(connectionAttempts).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should remain stable during normal connection states', async () => {
|
||||
const stableStates = [MinigraphStatus.CONNECTED, MinigraphStatus.CONNECTING];
|
||||
|
||||
for (const status of stableStates) {
|
||||
// Reset for each test
|
||||
connectionAttempts.length = 0;
|
||||
|
||||
// Given: Connection is in a stable state
|
||||
vi.mocked(connectionService.getConnectionState).mockReturnValue({
|
||||
status,
|
||||
error: null,
|
||||
});
|
||||
|
||||
// When: Connection status change event occurs
|
||||
await handler.onMothershipConnectionStatusChanged();
|
||||
|
||||
// Then: System should not trigger unnecessary reconnection attempts
|
||||
expect(connectionAttempts).toHaveLength(0);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Identity-Based Connection Behavior', () => {
|
||||
it('should establish connection when valid API key becomes available', async () => {
|
||||
// Given: Valid API key is present
|
||||
vi.mocked(connectionService.getIdentityState).mockReturnValue({
|
||||
state: {
|
||||
apiKey: 'valid-unraid-key-12345',
|
||||
unraidVersion: '6.12.0',
|
||||
flashGuid: 'test-flash-guid',
|
||||
apiVersion: '1.0.0',
|
||||
},
|
||||
isLoaded: true,
|
||||
});
|
||||
|
||||
// When: Identity changes
|
||||
await handler.onIdentityChanged();
|
||||
|
||||
// Then: System should establish mothership connection
|
||||
expect(connectionAttempts).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should not attempt connection without valid credentials', async () => {
|
||||
const invalidCredentials = [{ apiKey: undefined }, { apiKey: '' }];
|
||||
|
||||
for (const credentials of invalidCredentials) {
|
||||
// Reset for each test
|
||||
connectionAttempts.length = 0;
|
||||
|
||||
// Given: Invalid or missing API key
|
||||
vi.mocked(connectionService.getIdentityState).mockReturnValue({
|
||||
state: credentials,
|
||||
isLoaded: false,
|
||||
});
|
||||
|
||||
// When: Identity changes
|
||||
await handler.onIdentityChanged();
|
||||
|
||||
// Then: System should not attempt connection
|
||||
expect(connectionAttempts).toHaveLength(0);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Logout Behavior', () => {
|
||||
it('should properly clean up connections and notify subscribers on logout', async () => {
|
||||
// When: User logs out
|
||||
await handler.logout({ reason: 'User initiated logout' });
|
||||
|
||||
// Then: System should clean up connections
|
||||
expect(controllerStops).toHaveLength(1);
|
||||
|
||||
// And: Subscribers should be notified of empty state
|
||||
expect(publishedMessages).toHaveLength(2);
|
||||
|
||||
const serversMessage = publishedMessages.find(
|
||||
(m) => m.channel === GRAPHQL_PUBSUB_CHANNEL.SERVERS
|
||||
);
|
||||
const ownerMessage = publishedMessages.find(
|
||||
(m) => m.channel === GRAPHQL_PUBSUB_CHANNEL.OWNER
|
||||
);
|
||||
|
||||
expect(serversMessage?.data).toEqual({ servers: [] });
|
||||
expect(ownerMessage?.data).toEqual({
|
||||
owner: { username: 'root', url: '', avatar: '' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle logout gracefully even without explicit reason', async () => {
|
||||
// When: System logout occurs without reason
|
||||
await handler.logout({});
|
||||
|
||||
// Then: Cleanup should still occur properly
|
||||
expect(controllerStops).toHaveLength(1);
|
||||
expect(publishedMessages).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('DDoS Prevention Behavior', () => {
|
||||
it('should demonstrate exponential backoff is respected during network errors', async () => {
|
||||
// Given: Multiple rapid network errors occur
|
||||
const errorStates = [
|
||||
{ status: MinigraphStatus.ERROR_RETRYING, error: 'Network error 1' },
|
||||
{ status: MinigraphStatus.ERROR_RETRYING, error: 'Network error 2' },
|
||||
{ status: MinigraphStatus.ERROR_RETRYING, error: 'Network error 3' },
|
||||
];
|
||||
|
||||
// When: Rapid error retry states occur
|
||||
for (const state of errorStates) {
|
||||
vi.mocked(connectionService.getConnectionState).mockReturnValue(state);
|
||||
await handler.onMothershipConnectionStatusChanged();
|
||||
}
|
||||
|
||||
// Then: No linear retry attempts should be made (respecting exponential backoff)
|
||||
expect(connectionAttempts).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should differentiate between network errors and ping failures', async () => {
|
||||
// Given: Network error followed by ping failure
|
||||
vi.mocked(connectionService.getConnectionState).mockReturnValue({
|
||||
status: MinigraphStatus.ERROR_RETRYING,
|
||||
error: 'Network error',
|
||||
});
|
||||
|
||||
// When: Network error occurs
|
||||
await handler.onMothershipConnectionStatusChanged();
|
||||
|
||||
// Then: No immediate reconnection attempt
|
||||
expect(connectionAttempts).toHaveLength(0);
|
||||
|
||||
// Given: Ping failure occurs (different issue)
|
||||
vi.mocked(connectionService.getConnectionState).mockReturnValue({
|
||||
status: MinigraphStatus.PING_FAILURE,
|
||||
error: 'Ping timeout',
|
||||
});
|
||||
|
||||
// When: Ping failure occurs
|
||||
await handler.onMothershipConnectionStatusChanged();
|
||||
|
||||
// Then: Immediate reconnection attempt should occur
|
||||
expect(connectionAttempts).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases and Error Handling', () => {
|
||||
it('should handle missing connection state gracefully', async () => {
|
||||
// Given: Connection service returns undefined
|
||||
vi.mocked(connectionService.getConnectionState).mockReturnValue(undefined);
|
||||
|
||||
// When: Connection status change occurs
|
||||
await handler.onMothershipConnectionStatusChanged();
|
||||
|
||||
// Then: No errors should occur, no reconnection attempts
|
||||
expect(connectionAttempts).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should handle malformed connection state', async () => {
|
||||
// Given: Malformed connection state
|
||||
vi.mocked(connectionService.getConnectionState).mockReturnValue({
|
||||
status: 'UNKNOWN_STATUS' as any,
|
||||
error: 'Malformed state',
|
||||
});
|
||||
|
||||
// When: Connection status change occurs
|
||||
await handler.onMothershipConnectionStatusChanged();
|
||||
|
||||
// Then: Should not trigger reconnection for unknown states
|
||||
expect(connectionAttempts).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,158 @@
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { access, constants, mkdir, readFile, rm } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ConfigType } from '../config/connect.config.js';
|
||||
import { ConnectStatusWriterService } from './connect-status-writer.service.js';
|
||||
|
||||
describe('ConnectStatusWriterService Config Behavior', () => {
|
||||
let service: ConnectStatusWriterService;
|
||||
let configService: ConfigService<ConfigType, true>;
|
||||
const testDir = '/tmp/connect-status-config-test';
|
||||
const testFilePath = join(testDir, 'connectStatus.json');
|
||||
|
||||
// Simulate config changes
|
||||
let configStore: any = {};
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
// Reset config store
|
||||
configStore = {};
|
||||
|
||||
// Create test directory
|
||||
await mkdir(testDir, { recursive: true });
|
||||
|
||||
// Create a ConfigService mock that behaves like the real one
|
||||
configService = {
|
||||
get: vi.fn().mockImplementation((key: string) => {
|
||||
console.log(`ConfigService.get('${key}') called, returning:`, configStore[key]);
|
||||
return configStore[key];
|
||||
}),
|
||||
set: vi.fn().mockImplementation((key: string, value: any) => {
|
||||
console.log(`ConfigService.set('${key}', ${JSON.stringify(value)}) called`);
|
||||
configStore[key] = value;
|
||||
}),
|
||||
} as unknown as ConfigService<ConfigType, true>;
|
||||
|
||||
service = new ConnectStatusWriterService(configService);
|
||||
|
||||
// Override the status file path to use our test location
|
||||
Object.defineProperty(service, 'statusFilePath', {
|
||||
get: () => testFilePath,
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await service.onModuleDestroy();
|
||||
await rm(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should write status when config is updated directly', async () => {
|
||||
// Initialize service - should write PRE_INIT
|
||||
await service.onApplicationBootstrap();
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
let content = await readFile(testFilePath, 'utf-8');
|
||||
let data = JSON.parse(content);
|
||||
console.log('Initial status:', data);
|
||||
expect(data.connectionStatus).toBe('PRE_INIT');
|
||||
|
||||
// Update config directly (simulating what ConnectionService does)
|
||||
console.log('\n=== Updating config to CONNECTED ===');
|
||||
configService.set('connect.mothership', {
|
||||
status: 'CONNECTED',
|
||||
error: null,
|
||||
lastPing: Date.now(),
|
||||
});
|
||||
|
||||
// Call the writeStatus method directly (since @OnEvent handles the event)
|
||||
await service['writeStatus']();
|
||||
|
||||
content = await readFile(testFilePath, 'utf-8');
|
||||
data = JSON.parse(content);
|
||||
console.log('Status after config update:', data);
|
||||
expect(data.connectionStatus).toBe('CONNECTED');
|
||||
});
|
||||
|
||||
it('should test the actual flow with multiple status updates', async () => {
|
||||
await service.onApplicationBootstrap();
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
const statusUpdates = [
|
||||
{ status: 'CONNECTING', error: null, lastPing: null },
|
||||
{ status: 'CONNECTED', error: null, lastPing: Date.now() },
|
||||
{ status: 'DISCONNECTED', error: 'Lost connection', lastPing: Date.now() - 10000 },
|
||||
{ status: 'RECONNECTING', error: null, lastPing: Date.now() - 10000 },
|
||||
{ status: 'CONNECTED', error: null, lastPing: Date.now() },
|
||||
];
|
||||
|
||||
for (const update of statusUpdates) {
|
||||
console.log(`\n=== Updating to ${update.status} ===`);
|
||||
|
||||
// Update config
|
||||
configService.set('connect.mothership', update);
|
||||
|
||||
// Call writeStatus directly
|
||||
await service['writeStatus']();
|
||||
|
||||
const content = await readFile(testFilePath, 'utf-8');
|
||||
const data = JSON.parse(content);
|
||||
console.log(`Status file shows: ${data.connectionStatus}`);
|
||||
expect(data.connectionStatus).toBe(update.status);
|
||||
}
|
||||
});
|
||||
|
||||
it('should handle case where config is not set before event', async () => {
|
||||
await service.onApplicationBootstrap();
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
// Delete the config
|
||||
delete configStore['connect.mothership'];
|
||||
|
||||
// Call writeStatus without config
|
||||
console.log('\n=== Calling writeStatus with no config ===');
|
||||
await service['writeStatus']();
|
||||
|
||||
const content = await readFile(testFilePath, 'utf-8');
|
||||
const data = JSON.parse(content);
|
||||
console.log('Status with no config:', data);
|
||||
expect(data.connectionStatus).toBe('PRE_INIT');
|
||||
|
||||
// Now set config and call writeStatus again
|
||||
console.log('\n=== Setting config and calling writeStatus ===');
|
||||
configService.set('connect.mothership', {
|
||||
status: 'CONNECTED',
|
||||
error: null,
|
||||
lastPing: Date.now(),
|
||||
});
|
||||
await service['writeStatus']();
|
||||
|
||||
const content2 = await readFile(testFilePath, 'utf-8');
|
||||
const data2 = JSON.parse(content2);
|
||||
console.log('Status after setting config:', data2);
|
||||
expect(data2.connectionStatus).toBe('CONNECTED');
|
||||
});
|
||||
|
||||
describe('cleanup on shutdown', () => {
|
||||
it('should delete status file on module destroy', async () => {
|
||||
await service.onApplicationBootstrap();
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
// Verify file exists
|
||||
await expect(access(testFilePath, constants.F_OK)).resolves.not.toThrow();
|
||||
|
||||
// Cleanup
|
||||
await service.onModuleDestroy();
|
||||
|
||||
// Verify file is deleted
|
||||
await expect(access(testFilePath, constants.F_OK)).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should handle cleanup when file does not exist', async () => {
|
||||
// Don't bootstrap (so no file is written)
|
||||
await expect(service.onModuleDestroy()).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,167 @@
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { access, constants, mkdir, readFile, rm } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ConfigType } from '../config/connect.config.js';
|
||||
import { ConnectStatusWriterService } from './connect-status-writer.service.js';
|
||||
|
||||
describe('ConnectStatusWriterService Integration', () => {
|
||||
let service: ConnectStatusWriterService;
|
||||
let configService: ConfigService<ConfigType, true>;
|
||||
const testDir = '/tmp/connect-status-test';
|
||||
const testFilePath = join(testDir, 'connectStatus.json');
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
// Create test directory
|
||||
await mkdir(testDir, { recursive: true });
|
||||
|
||||
configService = {
|
||||
get: vi.fn().mockImplementation((key: string) => {
|
||||
console.log(`ConfigService.get called with key: ${key}`);
|
||||
return {
|
||||
status: 'CONNECTED',
|
||||
error: null,
|
||||
lastPing: Date.now(),
|
||||
};
|
||||
}),
|
||||
} as unknown as ConfigService<ConfigType, true>;
|
||||
|
||||
service = new ConnectStatusWriterService(configService);
|
||||
|
||||
// Override the status file path to use our test location
|
||||
Object.defineProperty(service, 'statusFilePath', {
|
||||
get: () => testFilePath,
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await service.onModuleDestroy();
|
||||
await rm(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should write initial PRE_INIT status, then update on event', async () => {
|
||||
// First, mock the config to return undefined (no connection metadata)
|
||||
vi.mocked(configService.get).mockReturnValue(undefined);
|
||||
|
||||
console.log('=== Starting onApplicationBootstrap ===');
|
||||
await service.onApplicationBootstrap();
|
||||
|
||||
// Wait a bit for the initial write to complete
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
// Read initial status
|
||||
const initialContent = await readFile(testFilePath, 'utf-8');
|
||||
const initialData = JSON.parse(initialContent);
|
||||
console.log('Initial status written:', initialData);
|
||||
|
||||
expect(initialData.connectionStatus).toBe('PRE_INIT');
|
||||
expect(initialData.error).toBeNull();
|
||||
expect(initialData.lastPing).toBeNull();
|
||||
|
||||
// Now update the mock to return CONNECTED status
|
||||
vi.mocked(configService.get).mockReturnValue({
|
||||
status: 'CONNECTED',
|
||||
error: null,
|
||||
lastPing: 1234567890,
|
||||
});
|
||||
|
||||
console.log('=== Calling writeStatus directly ===');
|
||||
await service['writeStatus']();
|
||||
|
||||
// Read updated status
|
||||
const updatedContent = await readFile(testFilePath, 'utf-8');
|
||||
const updatedData = JSON.parse(updatedContent);
|
||||
console.log('Updated status after writeStatus:', updatedData);
|
||||
|
||||
expect(updatedData.connectionStatus).toBe('CONNECTED');
|
||||
expect(updatedData.lastPing).toBe(1234567890);
|
||||
});
|
||||
|
||||
it('should handle rapid status changes correctly', async () => {
|
||||
const statusChanges = [
|
||||
{ status: 'PRE_INIT', error: null, lastPing: null },
|
||||
{ status: 'CONNECTING', error: null, lastPing: null },
|
||||
{ status: 'CONNECTED', error: null, lastPing: Date.now() },
|
||||
{ status: 'DISCONNECTED', error: 'Connection lost', lastPing: Date.now() - 5000 },
|
||||
{ status: 'CONNECTED', error: null, lastPing: Date.now() },
|
||||
];
|
||||
|
||||
let changeIndex = 0;
|
||||
vi.mocked(configService.get).mockImplementation(() => {
|
||||
const change = statusChanges[changeIndex];
|
||||
console.log(`Returning status ${changeIndex}: ${change.status}`);
|
||||
return change;
|
||||
});
|
||||
|
||||
await service.onApplicationBootstrap();
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
// Simulate the final status change
|
||||
changeIndex = statusChanges.length - 1;
|
||||
console.log(`=== Calling writeStatus for final status: ${statusChanges[changeIndex].status} ===`);
|
||||
await service['writeStatus']();
|
||||
|
||||
// Read final status
|
||||
const finalContent = await readFile(testFilePath, 'utf-8');
|
||||
const finalData = JSON.parse(finalContent);
|
||||
console.log('Final status after status change:', finalData);
|
||||
|
||||
// Should have the last status
|
||||
expect(finalData.connectionStatus).toBe('CONNECTED');
|
||||
expect(finalData.error).toBeNull();
|
||||
});
|
||||
|
||||
it('should handle multiple write calls correctly', async () => {
|
||||
const writes: number[] = [];
|
||||
const originalWriteStatus = service['writeStatus'].bind(service);
|
||||
|
||||
service['writeStatus'] = async function() {
|
||||
const timestamp = Date.now();
|
||||
writes.push(timestamp);
|
||||
console.log(`writeStatus called at ${timestamp}`);
|
||||
return originalWriteStatus();
|
||||
};
|
||||
|
||||
await service.onApplicationBootstrap();
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
const initialWrites = writes.length;
|
||||
console.log(`Initial writes: ${initialWrites}`);
|
||||
|
||||
// Make multiple write calls
|
||||
for (let i = 0; i < 3; i++) {
|
||||
console.log(`Calling writeStatus ${i}`);
|
||||
await service['writeStatus']();
|
||||
}
|
||||
|
||||
console.log(`Total writes: ${writes.length}`);
|
||||
console.log('Write timestamps:', writes);
|
||||
|
||||
// Should have initial write + 3 additional writes
|
||||
expect(writes.length).toBe(initialWrites + 3);
|
||||
});
|
||||
|
||||
describe('cleanup on shutdown', () => {
|
||||
it('should delete status file on module destroy', async () => {
|
||||
await service.onApplicationBootstrap();
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
// Verify file exists
|
||||
await expect(access(testFilePath, constants.F_OK)).resolves.not.toThrow();
|
||||
|
||||
// Cleanup
|
||||
await service.onModuleDestroy();
|
||||
|
||||
// Verify file is deleted
|
||||
await expect(access(testFilePath, constants.F_OK)).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should handle cleanup gracefully when file does not exist', async () => {
|
||||
// Don't bootstrap (so no file is created)
|
||||
await expect(service.onModuleDestroy()).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,140 @@
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { unlink, writeFile } from 'fs/promises';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { ConfigType } from '../config/connect.config.js';
|
||||
import { ConnectStatusWriterService } from './connect-status-writer.service.js';
|
||||
|
||||
vi.mock('fs/promises', () => ({
|
||||
writeFile: vi.fn(),
|
||||
unlink: vi.fn(),
|
||||
}));
|
||||
|
||||
describe('ConnectStatusWriterService', () => {
|
||||
let service: ConnectStatusWriterService;
|
||||
let configService: ConfigService<ConfigType, true>;
|
||||
let writeFileMock: ReturnType<typeof vi.fn>;
|
||||
let unlinkMock: ReturnType<typeof vi.fn>;
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
vi.useFakeTimers();
|
||||
|
||||
writeFileMock = vi.mocked(writeFile);
|
||||
unlinkMock = vi.mocked(unlink);
|
||||
|
||||
configService = {
|
||||
get: vi.fn().mockReturnValue({
|
||||
status: 'CONNECTED',
|
||||
error: null,
|
||||
lastPing: Date.now(),
|
||||
}),
|
||||
} as unknown as ConfigService<ConfigType, true>;
|
||||
|
||||
service = new ConnectStatusWriterService(configService);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
describe('onApplicationBootstrap', () => {
|
||||
it('should write initial status on bootstrap', async () => {
|
||||
await service.onApplicationBootstrap();
|
||||
|
||||
expect(writeFileMock).toHaveBeenCalledTimes(1);
|
||||
expect(writeFileMock).toHaveBeenCalledWith(
|
||||
'/var/local/emhttp/connectStatus.json',
|
||||
expect.stringContaining('CONNECTED')
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle event-driven status changes', async () => {
|
||||
await service.onApplicationBootstrap();
|
||||
writeFileMock.mockClear();
|
||||
|
||||
// The service uses @OnEvent decorator, so we need to call the method directly
|
||||
await service['writeStatus']();
|
||||
|
||||
expect(writeFileMock).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('write content', () => {
|
||||
it('should write correct JSON structure with all fields', async () => {
|
||||
const mockMetadata = {
|
||||
status: 'CONNECTED',
|
||||
error: 'Some error',
|
||||
lastPing: 1234567890,
|
||||
};
|
||||
|
||||
vi.mocked(configService.get).mockReturnValue(mockMetadata);
|
||||
|
||||
await service.onApplicationBootstrap();
|
||||
|
||||
const writeCall = writeFileMock.mock.calls[0];
|
||||
const writtenData = JSON.parse(writeCall[1] as string);
|
||||
|
||||
expect(writtenData).toMatchObject({
|
||||
connectionStatus: 'CONNECTED',
|
||||
error: 'Some error',
|
||||
lastPing: 1234567890,
|
||||
allowedOrigins: '',
|
||||
});
|
||||
expect(writtenData.timestamp).toBeDefined();
|
||||
expect(typeof writtenData.timestamp).toBe('number');
|
||||
});
|
||||
|
||||
it('should handle missing connection metadata', async () => {
|
||||
vi.mocked(configService.get).mockReturnValue(undefined);
|
||||
|
||||
await service.onApplicationBootstrap();
|
||||
|
||||
const writeCall = writeFileMock.mock.calls[0];
|
||||
const writtenData = JSON.parse(writeCall[1] as string);
|
||||
|
||||
expect(writtenData).toMatchObject({
|
||||
connectionStatus: 'PRE_INIT',
|
||||
error: null,
|
||||
lastPing: null,
|
||||
allowedOrigins: '',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('error handling', () => {
|
||||
it('should handle write errors gracefully', async () => {
|
||||
writeFileMock.mockRejectedValue(new Error('Write failed'));
|
||||
|
||||
await expect(service.onApplicationBootstrap()).resolves.not.toThrow();
|
||||
|
||||
// Test direct write error handling
|
||||
await expect(service['writeStatus']()).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('cleanup on shutdown', () => {
|
||||
it('should delete status file on module destroy', async () => {
|
||||
await service.onModuleDestroy();
|
||||
|
||||
expect(unlinkMock).toHaveBeenCalledTimes(1);
|
||||
expect(unlinkMock).toHaveBeenCalledWith('/var/local/emhttp/connectStatus.json');
|
||||
});
|
||||
|
||||
it('should handle file deletion errors gracefully', async () => {
|
||||
unlinkMock.mockRejectedValue(new Error('File not found'));
|
||||
|
||||
await expect(service.onModuleDestroy()).resolves.not.toThrow();
|
||||
|
||||
expect(unlinkMock).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should ensure file is deleted even if it was never written', async () => {
|
||||
// Don't bootstrap (so no file is written)
|
||||
await service.onModuleDestroy();
|
||||
|
||||
expect(unlinkMock).toHaveBeenCalledTimes(1);
|
||||
expect(unlinkMock).toHaveBeenCalledWith('/var/local/emhttp/connectStatus.json');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,69 @@
|
||||
import { Injectable, Logger, OnApplicationBootstrap, OnModuleDestroy } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { OnEvent } from '@nestjs/event-emitter';
|
||||
import { unlink } from 'fs/promises';
|
||||
import { writeFile } from 'fs/promises';
|
||||
|
||||
import { ConfigType, ConnectionMetadata } from '../config/connect.config.js';
|
||||
import { EVENTS } from '../helper/nest-tokens.js';
|
||||
|
||||
@Injectable()
|
||||
export class ConnectStatusWriterService implements OnApplicationBootstrap, OnModuleDestroy {
|
||||
constructor(private readonly configService: ConfigService<ConfigType, true>) {}
|
||||
|
||||
private logger = new Logger(ConnectStatusWriterService.name);
|
||||
|
||||
get statusFilePath() {
|
||||
// Write to /var/local/emhttp/connectStatus.json so PHP can read it
|
||||
return '/var/local/emhttp/connectStatus.json';
|
||||
}
|
||||
|
||||
async onApplicationBootstrap() {
|
||||
this.logger.verbose(`Status file path: ${this.statusFilePath}`);
|
||||
|
||||
// Write initial status
|
||||
await this.writeStatus();
|
||||
}
|
||||
|
||||
async onModuleDestroy() {
|
||||
try {
|
||||
await unlink(this.statusFilePath);
|
||||
this.logger.verbose(`Status file deleted: ${this.statusFilePath}`);
|
||||
} catch (error) {
|
||||
this.logger.debug(`Could not delete status file: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
@OnEvent(EVENTS.MOTHERSHIP_CONNECTION_STATUS_CHANGED, { async: true })
|
||||
private async writeStatus() {
|
||||
try {
|
||||
const connectionMetadata = this.configService.get<ConnectionMetadata>('connect.mothership');
|
||||
|
||||
// Try to get allowed origins from the store
|
||||
let allowedOrigins = '';
|
||||
try {
|
||||
// We can't import from @app here, so we'll skip allowed origins for now
|
||||
// This can be added later if needed
|
||||
allowedOrigins = '';
|
||||
} catch (error) {
|
||||
this.logger.debug('Could not get allowed origins:', error);
|
||||
}
|
||||
|
||||
const statusData = {
|
||||
connectionStatus: connectionMetadata?.status || 'PRE_INIT',
|
||||
error: connectionMetadata?.error || null,
|
||||
lastPing: connectionMetadata?.lastPing || null,
|
||||
allowedOrigins: allowedOrigins,
|
||||
timestamp: Date.now(),
|
||||
};
|
||||
|
||||
const data = JSON.stringify(statusData, null, 2);
|
||||
this.logger.verbose(`Writing connection status: ${data}`);
|
||||
|
||||
await writeFile(this.statusFilePath, data);
|
||||
this.logger.verbose(`Status written to ${this.statusFilePath}`);
|
||||
} catch (error) {
|
||||
this.logger.error(error, `Error writing status to '${this.statusFilePath}'`);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -130,11 +130,19 @@ export class MothershipConnectionService implements OnModuleInit, OnModuleDestro
|
||||
}
|
||||
|
||||
async onModuleInit() {
|
||||
// Crash on startup if these config values are not set initially
|
||||
// Warn on startup if these config values are not set initially
|
||||
const { unraidVersion, flashGuid, apiVersion } = this.configKeys;
|
||||
const warnings: string[] = [];
|
||||
[unraidVersion, flashGuid, apiVersion].forEach((key) => {
|
||||
this.configService.getOrThrow(key);
|
||||
try {
|
||||
this.configService.getOrThrow(key);
|
||||
} catch (error) {
|
||||
warnings.push(`${key} is not set`);
|
||||
}
|
||||
});
|
||||
if (warnings.length > 0) {
|
||||
this.logger.warn('Missing config values: %s', warnings.join(', '));
|
||||
}
|
||||
// Setup IDENTITY_CHANGED & METADATA_CHANGED events
|
||||
this.setupIdentitySubscription();
|
||||
this.setupMetadataChangedEvent();
|
||||
|
||||
@@ -32,7 +32,7 @@ export class MothershipHandler {
|
||||
const state = this.connectionService.getConnectionState();
|
||||
if (
|
||||
state &&
|
||||
[MinigraphStatus.PING_FAILURE, MinigraphStatus.ERROR_RETRYING].includes(state.status)
|
||||
[MinigraphStatus.PING_FAILURE].includes(state.status)
|
||||
) {
|
||||
this.logger.verbose(
|
||||
'Mothership connection status changed to %s; setting up mothership subscription',
|
||||
|
||||
@@ -3,18 +3,20 @@ import { Module } from '@nestjs/common';
|
||||
import { ConnectApiKeyService } from '../authn/connect-api-key.service.js';
|
||||
import { CloudResolver } from '../connection-status/cloud.resolver.js';
|
||||
import { CloudService } from '../connection-status/cloud.service.js';
|
||||
import { ConnectStatusWriterService } from '../connection-status/connect-status-writer.service.js';
|
||||
import { TimeoutCheckerJob } from '../connection-status/timeout-checker.job.js';
|
||||
import { InternalClientService } from '../internal-rpc/internal.client.js';
|
||||
import { RemoteAccessModule } from '../remote-access/remote-access.module.js';
|
||||
import { MothershipConnectionService } from './connection.service.js';
|
||||
import { MothershipGraphqlClientService } from './graphql.client.js';
|
||||
import { MothershipSubscriptionHandler } from './mothership-subscription.handler.js';
|
||||
import { MothershipHandler } from './mothership.events.js';
|
||||
import { MothershipController } from './mothership.controller.js';
|
||||
import { MothershipHandler } from './mothership.events.js';
|
||||
|
||||
@Module({
|
||||
imports: [RemoteAccessModule],
|
||||
providers: [
|
||||
ConnectStatusWriterService,
|
||||
ConnectApiKeyService,
|
||||
MothershipConnectionService,
|
||||
MothershipGraphqlClientService,
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
"commander": "14.0.0",
|
||||
"create-create-app": "7.3.0",
|
||||
"fs-extra": "11.3.0",
|
||||
"inquirer": "12.6.3",
|
||||
"inquirer": "12.7.0",
|
||||
"validate-npm-package-name": "6.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -25,7 +25,7 @@
|
||||
"@nestjs/graphql": "13.1.0",
|
||||
"@types/fs-extra": "11.0.4",
|
||||
"@types/inquirer": "9.0.8",
|
||||
"@types/node": "22.15.32",
|
||||
"@types/node": "22.16.4",
|
||||
"@types/validate-npm-package-name": "4.0.2",
|
||||
"class-transformer": "0.5.1",
|
||||
"class-validator": "0.14.2",
|
||||
|
||||
9
packages/unraid-shared/justfile
Normal file
9
packages/unraid-shared/justfile
Normal file
@@ -0,0 +1,9 @@
|
||||
# Justfile for unraid-shared
|
||||
|
||||
# Default recipe to run when just is called without arguments
|
||||
default:
|
||||
@just --list
|
||||
|
||||
# Watch for changes in src files and run clean + build
|
||||
watch:
|
||||
watchexec -r -e ts,tsx -w src -- pnpm build
|
||||
@@ -31,9 +31,9 @@
|
||||
"@jsonforms/core": "3.6.0",
|
||||
"@nestjs/common": "11.1.3",
|
||||
"@nestjs/graphql": "13.1.0",
|
||||
"@types/bun": "1.2.16",
|
||||
"@types/bun": "1.2.18",
|
||||
"@types/lodash-es": "4.17.12",
|
||||
"@types/node": "22.15.32",
|
||||
"@types/node": "22.16.4",
|
||||
"class-validator": "0.14.2",
|
||||
"graphql": "16.11.0",
|
||||
"graphql-scalars": "1.24.2",
|
||||
|
||||
@@ -1,11 +1,24 @@
|
||||
import { accessSync } from 'fs';
|
||||
import { access } from 'fs/promises';
|
||||
import { access, mkdir, writeFile } from 'fs/promises';
|
||||
import { mkdirSync, writeFileSync } from 'fs';
|
||||
import { F_OK } from 'node:constants';
|
||||
import { dirname } from 'path';
|
||||
|
||||
/**
|
||||
* Checks if a file exists asynchronously.
|
||||
* @param path - The file path to check
|
||||
* @returns Promise that resolves to true if file exists, false otherwise
|
||||
*/
|
||||
export const fileExists = async (path: string) =>
|
||||
access(path, F_OK)
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
|
||||
/**
|
||||
* Checks if a file exists synchronously.
|
||||
* @param path - The file path to check
|
||||
* @returns true if file exists, false otherwise
|
||||
*/
|
||||
export const fileExistsSync = (path: string) => {
|
||||
try {
|
||||
accessSync(path, F_OK);
|
||||
@@ -14,3 +27,44 @@ export const fileExistsSync = (path: string) => {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Writes data to a file, creating parent directories if they don't exist.
|
||||
*
|
||||
* This function ensures the directory structure exists before writing the file,
|
||||
* equivalent to `mkdir -p` followed by file writing.
|
||||
*
|
||||
* @param path - The file path to write to
|
||||
* @param data - The data to write (string or Buffer)
|
||||
* @throws {Error} If path is invalid (null, empty, or not a string)
|
||||
* @throws {Error} For any file system errors (EACCES, EPERM, ENOSPC, EISDIR, etc.)
|
||||
*/
|
||||
export const ensureWrite = async (path: string, data: string | Buffer) => {
|
||||
if (!path || typeof path !== 'string') {
|
||||
throw new Error(`Invalid path provided: ${path}`);
|
||||
}
|
||||
|
||||
await mkdir(dirname(path), { recursive: true });
|
||||
return await writeFile(path, data);
|
||||
};
|
||||
|
||||
/**
|
||||
* Writes data to a file synchronously, creating parent directories if they don't exist.
|
||||
*
|
||||
* This function ensures the directory structure exists before writing the file,
|
||||
* equivalent to `mkdir -p` followed by file writing.
|
||||
*
|
||||
* @param path - The file path to write to
|
||||
* @param data - The data to write (string or Buffer)
|
||||
* @throws {Error} If path is invalid (null, empty, or not a string)
|
||||
* @throws {Error} For any file system errors (EACCES, EPERM, ENOSPC, EISDIR, etc.)
|
||||
*/
|
||||
export const ensureWriteSync = (path: string, data: string | Buffer) => {
|
||||
if (!path || typeof path !== 'string') {
|
||||
throw new Error(`Invalid path provided: ${path}`);
|
||||
}
|
||||
|
||||
mkdirSync(dirname(path), { recursive: true });
|
||||
return writeFileSync(path, data);
|
||||
};
|
||||
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
{
|
||||
"name": "@unraid/connect-plugin",
|
||||
"version": "4.9.0",
|
||||
"version": "4.10.0",
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"commander": "14.0.0",
|
||||
"conventional-changelog": "6.0.0",
|
||||
"date-fns": "4.1.0",
|
||||
"glob": "11.0.1",
|
||||
"glob": "11.0.3",
|
||||
"html-sloppy-escaper": "0.1.0",
|
||||
"semver": "7.7.1",
|
||||
"tsx": "4.19.3",
|
||||
"zod": "3.24.2",
|
||||
"zx": "8.3.2"
|
||||
"semver": "7.7.2",
|
||||
"tsx": "4.20.3",
|
||||
"zod": "3.25.76",
|
||||
"zx": "8.7.1"
|
||||
},
|
||||
"type": "module",
|
||||
"license": "GPL-2.0-or-later",
|
||||
@@ -37,7 +37,7 @@
|
||||
"devDependencies": {
|
||||
"http-server": "14.1.1",
|
||||
"nodemon": "3.1.10",
|
||||
"vitest": "3.0.7"
|
||||
"vitest": "3.2.4"
|
||||
},
|
||||
"packageManager": "pnpm@10.12.4"
|
||||
"packageManager": "pnpm@10.13.1"
|
||||
}
|
||||
|
||||
@@ -138,6 +138,34 @@ exit 0
|
||||
</INLINE>
|
||||
</FILE>
|
||||
|
||||
<FILE Run="/bin/bash" Method="install">
|
||||
<INLINE>
|
||||
<![CDATA[
|
||||
echo "Patching header logo if necessary..."
|
||||
|
||||
# We do this here instead of via API FileModification to avoid undesirable
|
||||
# rollback when the API is stopped.
|
||||
#
|
||||
# This is necessary on < 7.2 because the unraid-header-os-version web component
|
||||
# that ships with the base OS only displayes the version, not the logo as well.
|
||||
#
|
||||
# Rolling back in this case (i.e when stopping the API) yields a duplicate logo
|
||||
# that blocks interaction with the navigation menu.
|
||||
|
||||
# Remove the old header logo from DefaultPageLayout.php if present
|
||||
if [ -f "/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php" ]; then
|
||||
sed -i 's|<a href="https://unraid.net" target="_blank"><?readfile("$docroot/webGui/images/UN-logotype-gradient.svg")?></a>||g' "/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php"
|
||||
|
||||
# Add unraid-modals element if not already present
|
||||
if ! grep -q '<unraid-modals>' "/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php"; then
|
||||
sed -i 's|<body>|<body>\n<unraid-modals></unraid-modals>|' "/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php"
|
||||
fi
|
||||
fi
|
||||
|
||||
]]>
|
||||
</INLINE>
|
||||
</FILE>
|
||||
|
||||
<FILE Run="/bin/bash" Method="remove">
|
||||
<INLINE>
|
||||
MAINNAME="&name;"
|
||||
@@ -303,8 +331,7 @@ exit 0
|
||||
<![CDATA[
|
||||
SCRIPTS_DIR="/usr/local/share/dynamix.unraid.net/install/scripts"
|
||||
# Log file for debugging
|
||||
LOGFILE="/var/log/unraid-api/dynamix-unraid-install.log"
|
||||
mkdir -p "$(dirname "$LOGFILE")"
|
||||
mkdir -p "/var/log/unraid-api"
|
||||
|
||||
echo "Starting Unraid Connect installation..."
|
||||
|
||||
@@ -316,26 +343,26 @@ CFG_NEW=/boot/config/plugins/dynamix.my.servers
|
||||
# Setup the API (but don't start it yet)
|
||||
if [ -x "$SCRIPTS_DIR/setup_api.sh" ]; then
|
||||
echo "Setting up Unraid API..."
|
||||
echo "Running setup_api.sh" >> "$LOGFILE"
|
||||
# Capture output and add to log file
|
||||
setup_output=$("$SCRIPTS_DIR/setup_api.sh")
|
||||
echo "$setup_output" >> "$LOGFILE"
|
||||
echo "Running setup_api.sh"
|
||||
# Run and show output to user
|
||||
"$SCRIPTS_DIR/setup_api.sh"
|
||||
else
|
||||
echo "ERROR: setup_api.sh not found or not executable" >> "$LOGFILE"
|
||||
echo "ERROR: setup_api.sh not found or not executable"
|
||||
echo "ERROR: setup_api.sh not found or not executable"
|
||||
fi
|
||||
|
||||
# Run post-installation verification
|
||||
if [ -x "$SCRIPTS_DIR/verify_install.sh" ]; then
|
||||
echo "Running post-installation verification..."
|
||||
echo "Running verify_install.sh" >> "$LOGFILE"
|
||||
# Capture output and add to log file
|
||||
verify_output=$("$SCRIPTS_DIR/verify_install.sh")
|
||||
echo "$verify_output" >> "$LOGFILE"
|
||||
echo "Running verify_install.sh"
|
||||
# Run and show output to user
|
||||
"$SCRIPTS_DIR/verify_install.sh"
|
||||
else
|
||||
echo "ERROR: verify_install.sh not found or not executable" >> "$LOGFILE"
|
||||
echo "ERROR: verify_install.sh not found or not executable"
|
||||
echo "ERROR: verify_install.sh not found or not executable"
|
||||
fi
|
||||
|
||||
echo "Installation completed at $(date)" >> "$LOGFILE"
|
||||
echo "Installation completed at $(date)"
|
||||
]]>
|
||||
</INLINE>
|
||||
</FILE>
|
||||
@@ -351,6 +378,18 @@ echo "Installation completed at $(date)" >> "$LOGFILE"
|
||||
/etc/rc.d/rc.unraid-api cleanup-dependencies
|
||||
|
||||
echo "Starting Unraid API service"
|
||||
echo "DEBUG: Checking PATH: $PATH"
|
||||
echo "DEBUG: Checking if unraid-api files exist:"
|
||||
ls -la /usr/local/unraid-api/dist/
|
||||
echo "DEBUG: Checking symlink:"
|
||||
ls -la /usr/local/bin/unraid-api
|
||||
echo "DEBUG: Checking Node.js version:"
|
||||
node --version
|
||||
echo "DEBUG: Checking if cli.js is executable:"
|
||||
ls -la /usr/local/unraid-api/dist/cli.js
|
||||
echo "DEBUG: Attempting to run unraid-api directly:"
|
||||
/usr/local/unraid-api/dist/cli.js version || echo "Direct execution failed"
|
||||
|
||||
echo "If no additional messages appear within 30 seconds, it is safe to refresh the page."
|
||||
/etc/rc.d/rc.unraid-api plugins add unraid-api-plugin-connect -b --no-restart
|
||||
/etc/rc.d/rc.unraid-api start
|
||||
|
||||
@@ -166,22 +166,23 @@ _enabled() {
|
||||
return 1
|
||||
}
|
||||
_connected() {
|
||||
CFG=$API_CONFIG_HOME/connect.json
|
||||
[[ ! -f "${CFG}" ]] && return 1
|
||||
local connect_config username status_cfg connection_status
|
||||
connect_config=$API_CONFIG_HOME/connect.json
|
||||
[[ ! -f "${connect_config}" ]] && return 1
|
||||
|
||||
username=$(jq -r '.username // empty' "${CFG}" 2>/dev/null)
|
||||
# is the user signed in?
|
||||
username=$(jq -r '.username // empty' "${connect_config}" 2>/dev/null)
|
||||
if [ -z "${username}" ]; then
|
||||
return 1
|
||||
fi
|
||||
# the minigraph status is no longer synced to the connect config file
|
||||
# to avoid a false negative, we'll omit this check for now.
|
||||
#
|
||||
# shellcheck disable=SC1090
|
||||
# source <(sed -nr '/\[connectionStatus\]/,/\[/{/minigraph/p}' "${CFG}" 2>/dev/null)
|
||||
# # ensure connected
|
||||
# if [[ -z "${minigraph}" || "${minigraph}" != "CONNECTED" ]]; then
|
||||
# return 1
|
||||
# fi
|
||||
# are we connected to mothership?
|
||||
status_cfg="/var/local/emhttp/connectStatus.json"
|
||||
[[ ! -f "${status_cfg}" ]] && return 1
|
||||
connection_status=$(jq -r '.connectionStatus // empty' "${status_cfg}" 2>/dev/null)
|
||||
if [[ "${connection_status}" != "CONNECTED" ]]; then
|
||||
return 1
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
_haserror() {
|
||||
|
||||
@@ -4,9 +4,6 @@
|
||||
# shellcheck source=/dev/null
|
||||
source /etc/profile
|
||||
|
||||
flash="/boot/config/plugins/dynamix.my.servers"
|
||||
[[ ! -d "${flash}" ]] && echo "Please reinstall the Unraid Connect plugin" && exit 1
|
||||
[[ ! -f "${flash}/env" ]] && echo 'env=production' >"${flash}/env"
|
||||
unraid_binary_path="/usr/local/bin/unraid-api"
|
||||
api_base_dir="/usr/local/unraid-api"
|
||||
scripts_dir="/usr/local/share/dynamix.unraid.net/scripts"
|
||||
|
||||
@@ -18,14 +18,20 @@ $cli = php_sapi_name()=='cli';
|
||||
|
||||
$docroot ??= ($_SERVER['DOCUMENT_ROOT'] ?: '/usr/local/emhttp');
|
||||
require_once "$docroot/webGui/include/Wrappers.php";
|
||||
require_once "$docroot/plugins/dynamix.my.servers/include/connect-config.php";
|
||||
|
||||
$myservers_flash_cfg_path='/boot/config/plugins/dynamix.my.servers/myservers.cfg';
|
||||
$myservers = file_exists($myservers_flash_cfg_path) ? @parse_ini_file($myservers_flash_cfg_path,true) : [];
|
||||
$isRegistered = !empty($myservers['remote']['username']);
|
||||
$isRegistered = ConnectConfig::isUserSignedIn();
|
||||
|
||||
$myservers_memory_cfg_path ='/var/local/emhttp/myservers.cfg';
|
||||
$mystatus = (file_exists($myservers_memory_cfg_path)) ? @parse_ini_file($myservers_memory_cfg_path) : [];
|
||||
$isConnected = (($mystatus['minigraph']??'')==='CONNECTED') ? true : false;
|
||||
// Read connection status from the new API status file
|
||||
$statusFilePath = '/var/local/emhttp/connectStatus.json';
|
||||
$connectionStatus = '';
|
||||
|
||||
if (file_exists($statusFilePath)) {
|
||||
$statusData = @json_decode(file_get_contents($statusFilePath), true);
|
||||
$connectionStatus = $statusData['connectionStatus'] ?? '';
|
||||
}
|
||||
|
||||
$isConnected = ($connectionStatus === 'CONNECTED') ? true : false;
|
||||
|
||||
$flashbackup_ini = '/var/local/emhttp/flashbackup.ini';
|
||||
|
||||
@@ -588,9 +594,31 @@ set_git_config('user.email', 'gitbot@unraid.net');
|
||||
set_git_config('user.name', 'gitbot');
|
||||
|
||||
// ensure dns can resolve backup.unraid.net
|
||||
if (! checkdnsrr("backup.unraid.net","A") ) {
|
||||
$dnsResolved = false;
|
||||
|
||||
// Try multiple DNS resolution methods
|
||||
if (function_exists('dns_get_record')) {
|
||||
$dnsRecords = dns_get_record("backup.unraid.net", DNS_A);
|
||||
$dnsResolved = !empty($dnsRecords);
|
||||
}
|
||||
|
||||
// Fallback to gethostbyname if dns_get_record fails
|
||||
if (!$dnsResolved) {
|
||||
$ip = gethostbyname("backup.unraid.net");
|
||||
$dnsResolved = ($ip !== "backup.unraid.net");
|
||||
}
|
||||
|
||||
// Final fallback to system nslookup
|
||||
if (!$dnsResolved) {
|
||||
$output = [];
|
||||
$return_var = 0;
|
||||
exec('nslookup backup.unraid.net 2>/dev/null', $output, $return_var);
|
||||
$dnsResolved = ($return_var === 0 && !empty($output));
|
||||
}
|
||||
|
||||
if (!$dnsResolved) {
|
||||
$arrState['loading'] = '';
|
||||
$arrState['error'] = 'DNS is unable to resolve backup.unraid.net';
|
||||
$arrState['error'] = 'DNS resolution failed for backup.unraid.net - PHP DNS functions (checkdnsrr, dns_get_record, gethostbyname) and system nslookup all failed to resolve the hostname. This indicates a DNS configuration issue on your Unraid server. Check your DNS settings in Settings > Network Settings.';
|
||||
response_complete(406, array('error' => $arrState['error']));
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,26 @@
|
||||
<?php
|
||||
$docroot = $docroot ?? $_SERVER['DOCUMENT_ROOT'] ?: '/usr/local/emhttp';
|
||||
require_once "$docroot/plugins/dynamix.my.servers/include/api-config.php";
|
||||
|
||||
/**
|
||||
* Wrapper around the API's connect.json configuration file.
|
||||
*/
|
||||
class ConnectConfig
|
||||
{
|
||||
public const CONFIG_PATH = ApiConfig::CONFIG_DIR . '/connect.json';
|
||||
|
||||
public static function getConfig()
|
||||
{
|
||||
try {
|
||||
return json_decode(file_get_contents(self::CONFIG_PATH), true) ?? [];
|
||||
} catch (Throwable $e) {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
public static function isUserSignedIn()
|
||||
{
|
||||
$config = self::getConfig();
|
||||
return ApiConfig::isConnectPluginEnabled() && !empty($config['username'] ?? '');
|
||||
}
|
||||
}
|
||||
@@ -168,9 +168,8 @@ class ServerState
|
||||
private function getMyServersCfgValues()
|
||||
{
|
||||
/**
|
||||
* @todo can we read this from somewhere other than the flash? Connect page uses this path and /boot/config/plugins/dynamix.my.servers/myservers.cfg…
|
||||
* - $myservers_memory_cfg_path ='/var/local/emhttp/myservers.cfg';
|
||||
* - $mystatus = (file_exists($myservers_memory_cfg_path)) ? @parse_ini_file($myservers_memory_cfg_path) : [];
|
||||
* Memory config is now written by the new API to /usr/local/emhttp/state/myservers.cfg
|
||||
* This contains runtime state including connection status.
|
||||
*/
|
||||
$flashCfgPath = '/boot/config/plugins/dynamix.my.servers/myservers.cfg';
|
||||
$this->myServersFlashCfg = file_exists($flashCfgPath) ? @parse_ini_file($flashCfgPath, true) : [];
|
||||
@@ -212,11 +211,19 @@ class ServerState
|
||||
* Include localhost in the test, but only display HTTP(S) URLs that do not include localhost.
|
||||
*/
|
||||
$this->host = $_SERVER['HTTP_HOST'] ?? "unknown";
|
||||
$memoryCfgPath = '/var/local/emhttp/myservers.cfg';
|
||||
$this->myServersMemoryCfg = (file_exists($memoryCfgPath)) ? @parse_ini_file($memoryCfgPath) : [];
|
||||
$this->myServersMiniGraphConnected = (($this->myServersMemoryCfg['minigraph'] ?? '') === 'CONNECTED');
|
||||
// Read connection status and allowed origins from the new API status file
|
||||
$statusFilePath = '/var/local/emhttp/connectStatus.json';
|
||||
$connectionStatus = '';
|
||||
$allowedOrigins = '';
|
||||
|
||||
if (file_exists($statusFilePath)) {
|
||||
$statusData = @json_decode(file_get_contents($statusFilePath), true);
|
||||
$connectionStatus = $statusData['connectionStatus'] ?? '';
|
||||
$allowedOrigins = $statusData['allowedOrigins'] ?? '';
|
||||
}
|
||||
|
||||
$this->myServersMiniGraphConnected = ($connectionStatus === 'CONNECTED');
|
||||
|
||||
$allowedOrigins = $this->myServersMemoryCfg['allowedOrigins'] ?? "";
|
||||
$extraOrigins = $this->myServersFlashCfg['api']['extraOrigins'] ?? "";
|
||||
$combinedOrigins = $allowedOrigins . "," . $extraOrigins; // combine the two strings for easier searching
|
||||
$combinedOrigins = str_replace(" ", "", $combinedOrigins); // replace any spaces with nothing
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
#!/bin/sh
|
||||
#!/bin/bash
|
||||
# Unraid API Installation Verification Script
|
||||
# Checks that critical files are installed correctly
|
||||
|
||||
# Exit on errors
|
||||
set -e
|
||||
|
||||
echo "Performing comprehensive installation verification..."
|
||||
|
||||
# Define critical files to check (POSIX-compliant, no arrays)
|
||||
@@ -171,7 +168,7 @@ if [ $TOTAL_ERRORS -eq 0 ]; then
|
||||
else
|
||||
printf 'Found %d total errors.\n' "$TOTAL_ERRORS"
|
||||
echo "Installation verification completed with issues."
|
||||
echo "See log file for details: /var/log/unraid-api/dynamix-unraid-install.log"
|
||||
echo "Please review the errors above and contact support if needed."
|
||||
# We don't exit with error as this is just a verification script
|
||||
exit 0
|
||||
fi
|
||||
4350
pnpm-lock.yaml
generated
4350
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@unraid/ui",
|
||||
"version": "4.9.0",
|
||||
"version": "4.10.0",
|
||||
"private": true,
|
||||
"license": "GPL-2.0-or-later",
|
||||
"type": "module",
|
||||
@@ -54,68 +54,68 @@
|
||||
"@jsonforms/core": "3.6.0",
|
||||
"@jsonforms/vue": "3.6.0",
|
||||
"@jsonforms/vue-vanilla": "3.6.0",
|
||||
"@vueuse/core": "13.4.0",
|
||||
"@vueuse/core": "13.5.0",
|
||||
"class-variance-authority": "0.7.1",
|
||||
"clsx": "2.1.1",
|
||||
"dompurify": "3.2.6",
|
||||
"kebab-case": "2.0.2",
|
||||
"lucide-vue-next": "0.519.0",
|
||||
"lucide-vue-next": "0.525.0",
|
||||
"marked": "16.0.0",
|
||||
"reka-ui": "2.3.1",
|
||||
"reka-ui": "2.3.2",
|
||||
"shadcn-vue": "2.2.0",
|
||||
"tailwind-merge": "2.6.0",
|
||||
"vue-sonner": "1.3.0"
|
||||
"vue-sonner": "1.3.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.4.2",
|
||||
"@storybook/addon-docs": "9.0.16",
|
||||
"@storybook/addon-links": "9.0.16",
|
||||
"@storybook/builder-vite": "9.0.16",
|
||||
"@storybook/vue3-vite": "9.0.16",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.5.1",
|
||||
"@storybook/addon-docs": "9.0.17",
|
||||
"@storybook/addon-links": "9.0.17",
|
||||
"@storybook/builder-vite": "9.0.17",
|
||||
"@storybook/vue3-vite": "9.0.17",
|
||||
"@tailwindcss/typography": "0.5.16",
|
||||
"@testing-library/vue": "8.1.0",
|
||||
"@types/jsdom": "21.1.7",
|
||||
"@types/node": "22.15.32",
|
||||
"@types/node": "22.16.4",
|
||||
"@types/testing-library__vue": "5.3.0",
|
||||
"@typescript-eslint/eslint-plugin": "8.34.1",
|
||||
"@typescript-eslint/eslint-plugin": "8.37.0",
|
||||
"@unraid/tailwind-rem-to-rem": "1.1.0",
|
||||
"@vitejs/plugin-vue": "5.2.4",
|
||||
"@vitejs/plugin-vue": "6.0.0",
|
||||
"@vitest/coverage-v8": "3.2.4",
|
||||
"@vitest/ui": "3.2.4",
|
||||
"@vue/test-utils": "2.4.6",
|
||||
"@vue/tsconfig": "0.7.0",
|
||||
"autoprefixer": "10.4.21",
|
||||
"concurrently": "9.1.2",
|
||||
"eslint": "9.29.0",
|
||||
"concurrently": "9.2.0",
|
||||
"eslint": "9.31.0",
|
||||
"eslint-config-prettier": "10.1.5",
|
||||
"eslint-plugin-import": "2.31.0",
|
||||
"eslint-plugin-import": "2.32.0",
|
||||
"eslint-plugin-no-relative-import-paths": "1.6.1",
|
||||
"eslint-plugin-prettier": "5.5.0",
|
||||
"eslint-plugin-storybook": "9.0.16",
|
||||
"eslint-plugin-vue": "10.2.0",
|
||||
"happy-dom": "18.0.0",
|
||||
"jiti": "^2.4.2",
|
||||
"eslint-plugin-prettier": "5.5.1",
|
||||
"eslint-plugin-storybook": "9.0.17",
|
||||
"eslint-plugin-vue": "10.3.0",
|
||||
"happy-dom": "18.0.1",
|
||||
"jiti": "2.4.2",
|
||||
"postcss": "8.5.6",
|
||||
"postcss-import": "16.1.1",
|
||||
"prettier": "3.5.3",
|
||||
"prettier-plugin-tailwindcss": "0.6.13",
|
||||
"prettier": "3.6.2",
|
||||
"prettier-plugin-tailwindcss": "0.6.14",
|
||||
"rimraf": "6.0.1",
|
||||
"storybook": "9.0.16",
|
||||
"storybook": "9.0.17",
|
||||
"tailwind-rem-to-rem": "github:unraid/tailwind-rem-to-rem",
|
||||
"tailwindcss": "3.4.17",
|
||||
"tailwindcss-animate": "1.0.7",
|
||||
"typescript": "5.8.3",
|
||||
"typescript-eslint": "8.34.1",
|
||||
"vite": "7.0.3",
|
||||
"typescript-eslint": "8.37.0",
|
||||
"vite": "7.0.4",
|
||||
"vite-plugin-dts": "3.9.1",
|
||||
"vite-plugin-vue-devtools": "7.7.7",
|
||||
"vitest": "3.2.4",
|
||||
"vue": "3.5.17",
|
||||
"vue-tsc": "3.0.1",
|
||||
"wrangler": "^3.87.0"
|
||||
"wrangler": "4.24.3"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@rollup/rollup-linux-x64-gnu": "4.44.0"
|
||||
"@rollup/rollup-linux-x64-gnu": "4.45.1"
|
||||
},
|
||||
"exports": {
|
||||
".": {
|
||||
@@ -139,5 +139,5 @@
|
||||
"import": "./dist/theme/preset.js"
|
||||
}
|
||||
},
|
||||
"packageManager": "pnpm@10.12.4"
|
||||
"packageManager": "pnpm@10.13.1"
|
||||
}
|
||||
|
||||
@@ -38,6 +38,11 @@ vi.mock('@vue/apollo-composable', () => ({
|
||||
onResult: vi.fn(),
|
||||
onError: vi.fn(),
|
||||
}),
|
||||
useMutation: () => ({
|
||||
mutate: vi.fn(),
|
||||
onDone: vi.fn(),
|
||||
onError: vi.fn(),
|
||||
}),
|
||||
provideApolloClient: vi.fn(),
|
||||
}));
|
||||
|
||||
|
||||
@@ -41,6 +41,11 @@ vi.mock('@vue/apollo-composable', () => ({
|
||||
onResult: vi.fn(),
|
||||
onError: vi.fn(),
|
||||
}),
|
||||
useMutation: () => ({
|
||||
mutate: vi.fn(),
|
||||
onDone: vi.fn(),
|
||||
onError: vi.fn(),
|
||||
}),
|
||||
provideApolloClient: vi.fn(),
|
||||
}));
|
||||
|
||||
|
||||
@@ -764,4 +764,323 @@ describe('useServerStore', () => {
|
||||
expect(store.cloudError).toBeDefined();
|
||||
expect((store.cloudError as { message: string })?.message).toBe('Test error');
|
||||
});
|
||||
|
||||
describe('trial extension features', () => {
|
||||
it('should determine trial extension eligibility correctly', () => {
|
||||
const store = getStore();
|
||||
|
||||
// Add trialExtensionEligible property to the store
|
||||
Object.defineProperty(store, 'trialExtensionEligible', {
|
||||
get: () => !store.regGen || store.regGen < 2,
|
||||
});
|
||||
|
||||
// Eligible - no regGen
|
||||
store.setServer({ regGen: 0 });
|
||||
expect(store.trialExtensionEligible).toBe(true);
|
||||
|
||||
// Eligible - regGen = 1
|
||||
store.setServer({ regGen: 1 });
|
||||
expect(store.trialExtensionEligible).toBe(true);
|
||||
|
||||
// Not eligible - regGen = 2
|
||||
store.setServer({ regGen: 2 });
|
||||
expect(store.trialExtensionEligible).toBe(false);
|
||||
|
||||
// Not eligible - regGen > 2
|
||||
store.setServer({ regGen: 3 });
|
||||
expect(store.trialExtensionEligible).toBe(false);
|
||||
});
|
||||
|
||||
it('should calculate trial within 5 days of expiration correctly', () => {
|
||||
const store = getStore();
|
||||
|
||||
// Add properties to the store
|
||||
Object.defineProperty(store, 'expireTime', { value: 0, writable: true });
|
||||
Object.defineProperty(store, 'trialWithin5DaysOfExpiration', {
|
||||
get: () => {
|
||||
if (!store.expireTime || store.state !== 'TRIAL') {
|
||||
return false;
|
||||
}
|
||||
const today = dayjs();
|
||||
const expirationDate = dayjs(store.expireTime);
|
||||
const daysUntilExpiration = expirationDate.diff(today, 'day');
|
||||
return daysUntilExpiration <= 5 && daysUntilExpiration >= 0;
|
||||
},
|
||||
});
|
||||
|
||||
// Not a trial
|
||||
store.setServer({ state: 'PRO' as ServerState, expireTime: dayjs().add(3, 'day').unix() * 1000 });
|
||||
expect(store.trialWithin5DaysOfExpiration).toBe(false);
|
||||
|
||||
// Trial but no expireTime
|
||||
store.setServer({ state: 'TRIAL' as ServerState, expireTime: 0 });
|
||||
expect(store.trialWithin5DaysOfExpiration).toBe(false);
|
||||
|
||||
// Trial expiring in 3 days
|
||||
store.setServer({ state: 'TRIAL' as ServerState, expireTime: dayjs().add(3, 'day').unix() * 1000 });
|
||||
expect(store.trialWithin5DaysOfExpiration).toBe(true);
|
||||
|
||||
// Trial expiring in exactly 5 days
|
||||
store.setServer({ state: 'TRIAL' as ServerState, expireTime: dayjs().add(5, 'day').unix() * 1000 });
|
||||
expect(store.trialWithin5DaysOfExpiration).toBe(true);
|
||||
|
||||
// Trial expiring in 7 days (to ensure it's clearly outside the 5-day window)
|
||||
store.setServer({ state: 'TRIAL' as ServerState, expireTime: dayjs().add(7, 'day').unix() * 1000 });
|
||||
expect(store.trialWithin5DaysOfExpiration).toBe(false);
|
||||
|
||||
// Trial already expired
|
||||
store.setServer({ state: 'TRIAL' as ServerState, expireTime: dayjs().subtract(1, 'day').unix() * 1000 });
|
||||
expect(store.trialWithin5DaysOfExpiration).toBe(false);
|
||||
});
|
||||
|
||||
it('should calculate trial extension renewal window conditions correctly', () => {
|
||||
const store = getStore();
|
||||
|
||||
// Add all necessary properties
|
||||
Object.defineProperty(store, 'expireTime', { value: 0, writable: true });
|
||||
Object.defineProperty(store, 'trialExtensionEligible', {
|
||||
get: () => !store.regGen || store.regGen < 2,
|
||||
});
|
||||
Object.defineProperty(store, 'trialWithin5DaysOfExpiration', {
|
||||
get: () => {
|
||||
if (!store.expireTime || store.state !== 'TRIAL') {
|
||||
return false;
|
||||
}
|
||||
const today = dayjs();
|
||||
const expirationDate = dayjs(store.expireTime);
|
||||
const daysUntilExpiration = expirationDate.diff(today, 'day');
|
||||
return daysUntilExpiration <= 5 && daysUntilExpiration >= 0;
|
||||
},
|
||||
});
|
||||
Object.defineProperty(store, 'trialExtensionEligibleInsideRenewalWindow', {
|
||||
get: () => store.trialExtensionEligible && store.trialWithin5DaysOfExpiration,
|
||||
});
|
||||
Object.defineProperty(store, 'trialExtensionEligibleOutsideRenewalWindow', {
|
||||
get: () => store.trialExtensionEligible && !store.trialWithin5DaysOfExpiration,
|
||||
});
|
||||
Object.defineProperty(store, 'trialExtensionIneligibleInsideRenewalWindow', {
|
||||
get: () => !store.trialExtensionEligible && store.trialWithin5DaysOfExpiration,
|
||||
});
|
||||
|
||||
// Eligible inside renewal window
|
||||
store.setServer({
|
||||
state: 'TRIAL' as ServerState,
|
||||
regGen: 1,
|
||||
expireTime: dayjs().add(3, 'day').unix() * 1000,
|
||||
});
|
||||
expect(store.trialExtensionEligibleInsideRenewalWindow).toBe(true);
|
||||
expect(store.trialExtensionEligibleOutsideRenewalWindow).toBe(false);
|
||||
expect(store.trialExtensionIneligibleInsideRenewalWindow).toBe(false);
|
||||
|
||||
// Eligible outside renewal window
|
||||
store.setServer({
|
||||
state: 'TRIAL' as ServerState,
|
||||
regGen: 1,
|
||||
expireTime: dayjs().add(10, 'day').unix() * 1000,
|
||||
});
|
||||
expect(store.trialExtensionEligibleInsideRenewalWindow).toBe(false);
|
||||
expect(store.trialExtensionEligibleOutsideRenewalWindow).toBe(true);
|
||||
expect(store.trialExtensionIneligibleInsideRenewalWindow).toBe(false);
|
||||
|
||||
// Ineligible inside renewal window
|
||||
store.setServer({
|
||||
state: 'TRIAL' as ServerState,
|
||||
regGen: 2,
|
||||
expireTime: dayjs().add(3, 'day').unix() * 1000,
|
||||
});
|
||||
expect(store.trialExtensionEligibleInsideRenewalWindow).toBe(false);
|
||||
expect(store.trialExtensionEligibleOutsideRenewalWindow).toBe(false);
|
||||
expect(store.trialExtensionIneligibleInsideRenewalWindow).toBe(true);
|
||||
|
||||
// Ineligible outside renewal window
|
||||
store.setServer({
|
||||
state: 'TRIAL' as ServerState,
|
||||
regGen: 2,
|
||||
expireTime: dayjs().add(10, 'day').unix() * 1000,
|
||||
});
|
||||
expect(store.trialExtensionEligibleInsideRenewalWindow).toBe(false);
|
||||
expect(store.trialExtensionEligibleOutsideRenewalWindow).toBe(false);
|
||||
expect(store.trialExtensionIneligibleInsideRenewalWindow).toBe(false);
|
||||
});
|
||||
|
||||
it('should display correct trial messages based on extension eligibility and renewal window', () => {
|
||||
const store = getStore();
|
||||
|
||||
// Add all necessary properties
|
||||
Object.defineProperty(store, 'expireTime', { value: 0, writable: true });
|
||||
Object.defineProperty(store, 'trialExtensionEligible', {
|
||||
get: () => !store.regGen || store.regGen < 2,
|
||||
});
|
||||
Object.defineProperty(store, 'trialWithin5DaysOfExpiration', {
|
||||
get: () => {
|
||||
if (!store.expireTime || store.state !== 'TRIAL') {
|
||||
return false;
|
||||
}
|
||||
const today = dayjs();
|
||||
const expirationDate = dayjs(store.expireTime);
|
||||
const daysUntilExpiration = expirationDate.diff(today, 'day');
|
||||
return daysUntilExpiration <= 5 && daysUntilExpiration >= 0;
|
||||
},
|
||||
});
|
||||
Object.defineProperty(store, 'trialExtensionEligibleInsideRenewalWindow', {
|
||||
get: () => store.trialExtensionEligible && store.trialWithin5DaysOfExpiration,
|
||||
});
|
||||
Object.defineProperty(store, 'trialExtensionEligibleOutsideRenewalWindow', {
|
||||
get: () => store.trialExtensionEligible && !store.trialWithin5DaysOfExpiration,
|
||||
});
|
||||
Object.defineProperty(store, 'trialExtensionIneligibleInsideRenewalWindow', {
|
||||
get: () => !store.trialExtensionEligible && store.trialWithin5DaysOfExpiration,
|
||||
});
|
||||
|
||||
// Mock stateData getter to include trial message logic
|
||||
Object.defineProperty(store, 'stateData', {
|
||||
get: () => {
|
||||
if (store.state !== 'TRIAL') {
|
||||
return {
|
||||
humanReadable: '',
|
||||
heading: '',
|
||||
message: '',
|
||||
actions: [],
|
||||
};
|
||||
}
|
||||
|
||||
let trialMessage = '';
|
||||
if (store.trialExtensionEligibleInsideRenewalWindow) {
|
||||
trialMessage = '<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>Your trial is expiring soon. When it expires, <strong>the array will stop</strong>. You may extend your trial now, purchase a license key, or wait until expiration to take action.</p>';
|
||||
} else if (store.trialExtensionIneligibleInsideRenewalWindow) {
|
||||
trialMessage = '<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>Your trial is expiring soon and you have used all available extensions. When it expires, <strong>the array will stop</strong>. To continue using Unraid OS, you must purchase a license key.</p>';
|
||||
} else if (store.trialExtensionEligibleOutsideRenewalWindow) {
|
||||
trialMessage = '<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>When your <em>Trial</em> expires, <strong>the array will stop</strong>. At that point you may either purchase a license key or request a <em>Trial</em> extension.</p>';
|
||||
} else {
|
||||
trialMessage = '<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>You have used all available trial extensions. When your <em>Trial</em> expires, <strong>the array will stop</strong>. To continue using Unraid OS after expiration, you must purchase a license key.</p>';
|
||||
}
|
||||
|
||||
return {
|
||||
humanReadable: 'Trial',
|
||||
heading: 'Thank you for choosing Unraid OS!',
|
||||
message: trialMessage,
|
||||
actions: [],
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
// Test case 1: Eligible inside renewal window
|
||||
store.setServer({
|
||||
state: 'TRIAL' as ServerState,
|
||||
regGen: 1,
|
||||
expireTime: dayjs().add(3, 'day').unix() * 1000,
|
||||
});
|
||||
expect(store.stateData.message).toContain('Your trial is expiring soon');
|
||||
expect(store.stateData.message).toContain('You may extend your trial now');
|
||||
|
||||
// Test case 2: Ineligible inside renewal window
|
||||
store.setServer({
|
||||
state: 'TRIAL' as ServerState,
|
||||
regGen: 2,
|
||||
expireTime: dayjs().add(3, 'day').unix() * 1000,
|
||||
});
|
||||
expect(store.stateData.message).toContain('Your trial is expiring soon and you have used all available extensions');
|
||||
expect(store.stateData.message).toContain('To continue using Unraid OS, you must purchase a license key');
|
||||
|
||||
// Test case 3: Eligible outside renewal window
|
||||
store.setServer({
|
||||
state: 'TRIAL' as ServerState,
|
||||
regGen: 0,
|
||||
expireTime: dayjs().add(10, 'day').unix() * 1000,
|
||||
});
|
||||
expect(store.stateData.message).toContain('At that point you may either purchase a license key or request a <em>Trial</em> extension');
|
||||
|
||||
// Test case 4: Ineligible outside renewal window
|
||||
store.setServer({
|
||||
state: 'TRIAL' as ServerState,
|
||||
regGen: 2,
|
||||
expireTime: dayjs().add(10, 'day').unix() * 1000,
|
||||
});
|
||||
expect(store.stateData.message).toContain('You have used all available trial extensions');
|
||||
expect(store.stateData.message).toContain('To continue using Unraid OS after expiration, you must purchase a license key');
|
||||
});
|
||||
|
||||
it('should include trial extend action only when eligible inside renewal window', () => {
|
||||
const store = getStore();
|
||||
|
||||
// Add necessary properties
|
||||
Object.defineProperty(store, 'expireTime', { value: 0, writable: true });
|
||||
Object.defineProperty(store, 'trialExtensionEligible', {
|
||||
get: () => !store.regGen || store.regGen < 2,
|
||||
});
|
||||
Object.defineProperty(store, 'trialWithin5DaysOfExpiration', {
|
||||
get: () => {
|
||||
if (!store.expireTime || store.state !== 'TRIAL') {
|
||||
return false;
|
||||
}
|
||||
const today = dayjs();
|
||||
const expirationDate = dayjs(store.expireTime);
|
||||
const daysUntilExpiration = expirationDate.diff(today, 'day');
|
||||
return daysUntilExpiration <= 5 && daysUntilExpiration >= 0;
|
||||
},
|
||||
});
|
||||
Object.defineProperty(store, 'trialExtensionEligibleInsideRenewalWindow', {
|
||||
get: () => store.trialExtensionEligible && store.trialWithin5DaysOfExpiration,
|
||||
});
|
||||
|
||||
// Mock the trialExtendAction
|
||||
const trialExtendAction = { name: 'trialExtend', text: 'Extend Trial' };
|
||||
|
||||
// Mock stateData getter to include actions logic
|
||||
Object.defineProperty(store, 'stateData', {
|
||||
get: () => {
|
||||
if (store.state !== 'TRIAL') {
|
||||
return {
|
||||
humanReadable: '',
|
||||
heading: '',
|
||||
message: '',
|
||||
actions: [],
|
||||
};
|
||||
}
|
||||
|
||||
const actions = [];
|
||||
if (store.trialExtensionEligibleInsideRenewalWindow) {
|
||||
actions.push(trialExtendAction);
|
||||
}
|
||||
|
||||
return {
|
||||
humanReadable: 'Trial',
|
||||
heading: 'Thank you for choosing Unraid OS!',
|
||||
message: '',
|
||||
actions,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
// Test case 1: Eligible inside renewal window - should include trialExtend action
|
||||
store.setServer({
|
||||
state: 'TRIAL' as ServerState,
|
||||
regGen: 1,
|
||||
expireTime: dayjs().add(3, 'day').unix() * 1000,
|
||||
registered: true,
|
||||
connectPluginInstalled: 'true' as ServerconnectPluginInstalled,
|
||||
});
|
||||
expect(store.stateData.actions?.some((action: { name: string }) => action.name === 'trialExtend')).toBe(true);
|
||||
|
||||
// Test case 2: Not eligible inside renewal window - should NOT include trialExtend action
|
||||
store.setServer({
|
||||
state: 'TRIAL' as ServerState,
|
||||
regGen: 2,
|
||||
expireTime: dayjs().add(3, 'day').unix() * 1000,
|
||||
registered: true,
|
||||
connectPluginInstalled: 'true' as ServerconnectPluginInstalled,
|
||||
});
|
||||
expect(store.stateData.actions?.some((action: { name: string }) => action.name === 'trialExtend')).toBe(false);
|
||||
|
||||
// Test case 3: Eligible outside renewal window - should NOT include trialExtend action
|
||||
store.setServer({
|
||||
state: 'TRIAL' as ServerState,
|
||||
regGen: 1,
|
||||
expireTime: dayjs().add(10, 'day').unix() * 1000,
|
||||
registered: true,
|
||||
connectPluginInstalled: 'true' as ServerconnectPluginInstalled,
|
||||
});
|
||||
expect(store.stateData.actions?.some((action: { name: string }) => action.name === 'trialExtend')).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<script setup lang="ts">
|
||||
import { computed, type Component } from 'vue';
|
||||
import { computed } from 'vue';
|
||||
import type { Component } from 'vue';
|
||||
import { BellIcon, ExclamationTriangleIcon, ShieldExclamationIcon } from '@heroicons/vue/24/solid';
|
||||
import { cn } from '@unraid/ui';
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<script setup lang="ts">
|
||||
import { computed, reactive, type Component } from 'vue';
|
||||
import { computed, reactive } from 'vue';
|
||||
import type { Component } from 'vue';
|
||||
import { computedAsync } from '@vueuse/core';
|
||||
import { Markdown } from '@/helpers/markdown';
|
||||
import {
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { InMemoryCache, type InMemoryCacheConfig } from '@apollo/client/core';
|
||||
import { InMemoryCache } from '@apollo/client/core';
|
||||
import type { InMemoryCacheConfig } from '@apollo/client/core';
|
||||
|
||||
import type { NotificationOverview } from '~/composables/gql/graphql';
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import DOMPurify from 'isomorphic-dompurify';
|
||||
import { Marked, type MarkedExtension } from 'marked';
|
||||
import { Marked } from 'marked';
|
||||
import type { MarkedExtension } from 'marked';
|
||||
|
||||
const defaultMarkedExtension: MarkedExtension = {
|
||||
hooks: {
|
||||
|
||||
@@ -23,6 +23,10 @@
|
||||
"<p>To support more storage devices as your server grows, click Upgrade Key.</p>": "<p>To support more storage devices as your server grows, click Upgrade Key.</p>",
|
||||
"<p>You have used all your Trial extensions. To continue using Unraid OS you may purchase a license key.</p>": "<p>You have used all your Trial extensions. To continue using Unraid OS you may purchase a license key.</p>",
|
||||
"<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>After your <em>Trial</em> has reached expiration, your server <strong>still functions normally</strong> until the next time you Stop the array or reboot your server.</p><p>At that point you may either purchase a license key or request a <em>Trial</em> extension.</p>": "<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>After your <em>Trial</em> has reached expiration, your server <strong>still functions normally</strong> until the next time you Stop the array or reboot your server.</p><p>At that point you may either purchase a license key or request a <em>Trial</em> extension.</p>",
|
||||
"<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>When your <em>Trial</em> expires, <strong>the array will stop</strong>. At that point you may either purchase a license key or request a <em>Trial</em> extension.</p>": "<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>When your <em>Trial</em> expires, <strong>the array will stop</strong>. At that point you may either purchase a license key or request a <em>Trial</em> extension.</p>",
|
||||
"<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>Your trial is expiring soon. When it expires, <strong>the array will stop</strong>. You may extend your trial now, purchase a license key, or wait until expiration to take action.</p>": "<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>Your trial is expiring soon. When it expires, <strong>the array will stop</strong>. You may extend your trial now, purchase a license key, or wait until expiration to take action.</p>",
|
||||
"<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>Your trial is expiring soon and you have used all available extensions. When it expires, <strong>the array will stop</strong>. To continue using Unraid OS, you must purchase a license key.</p>": "<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>Your trial is expiring soon and you have used all available extensions. When it expires, <strong>the array will stop</strong>. To continue using Unraid OS, you must purchase a license key.</p>",
|
||||
"<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>You have used all available trial extensions. When your <em>Trial</em> expires, <strong>the array will stop</strong>. To continue using Unraid OS after expiration, you must purchase a license key.</p>": "<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>You have used all available trial extensions. When your <em>Trial</em> expires, <strong>the array will stop</strong>. To continue using Unraid OS after expiration, you must purchase a license key.</p>",
|
||||
"<p>Your license key file is corrupted or missing. The key file should be located in the /config directory on your USB Flash boot device.</p><p>If you do not have a backup copy of your license key file you may attempt to recover your key.</p><p>If this was an expired Trial installation, you may purchase a license key.</p>": "<p>Your license key file is corrupted or missing. The key file should be located in the /config directory on your USB Flash boot device.</p><p>If you do not have a backup copy of your license key file you may attempt to recover your key.</p><p>If this was an expired Trial installation, you may purchase a license key.</p>",
|
||||
"<p>Your license key file is corrupted or missing. The key file should be located in the /config directory on your USB Flash boot device.</p><p>You may attempt to recover your key with your Unraid.net account.</p><p>If this was an expired Trial installation, you may purchase a license key.</p>": "<p>Your license key file is corrupted or missing. The key file should be located in the /config directory on your USB Flash boot device.</p><p>You may attempt to recover your key with your Unraid.net account.</p><p>If this was an expired Trial installation, you may purchase a license key.</p>",
|
||||
"<p>Your server will not be usable until you purchase a Registration key or install a free 30 day <em>Trial</em> key. A <em>Trial</em> key provides all the functionality of an Unleashed Registration key.</p><p>Registration keys are bound to your USB Flash boot device serial number (GUID). Please use a high quality name brand device at least 1GB in size.</p><p>Note: USB memory card readers are generally not supported because most do not present unique serial numbers.</p><p><strong>Important:</strong></p><ul class='list-disc pl-16px'><li>Please make sure your server time is accurate to within 5 minutes</li><li>Please make sure there is a DNS server specified</li></ul>": "<p>Your server will not be usable until you purchase a Registration key or install a free 30 day <em>Trial</em> key. A <em>Trial</em> key provides all the functionality of an Unleashed Registration key.</p><p>Registration keys are bound to your USB Flash boot device serial number (GUID). Please use a high quality name brand device at least 1GB in size.</p><p>Note: USB memory card readers are generally not supported because most do not present unique serial numbers.</p><p><strong>Important:</strong></p><ul class='list-disc pl-16px'><li>Please make sure your server time is accurate to within 5 minutes</li><li>Please make sure there is a DNS server specified</li></ul>",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@unraid/web",
|
||||
"version": "4.9.0",
|
||||
"version": "4.10.0",
|
||||
"private": true,
|
||||
"license": "GPL-2.0-or-later",
|
||||
"scripts": {
|
||||
@@ -39,13 +39,13 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@graphql-codegen/cli": "5.0.7",
|
||||
"@graphql-codegen/client-preset": "4.8.2",
|
||||
"@graphql-codegen/client-preset": "4.8.3",
|
||||
"@graphql-codegen/introspection": "4.0.3",
|
||||
"@graphql-typed-document-node/core": "3.2.0",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.4.2",
|
||||
"@nuxt/devtools": "2.5.0",
|
||||
"@nuxt/eslint": "1.4.1",
|
||||
"@nuxt/test-utils": "3.19.1",
|
||||
"@ianvs/prettier-plugin-sort-imports": "4.5.1",
|
||||
"@nuxt/devtools": "2.6.2",
|
||||
"@nuxt/eslint": "1.5.2",
|
||||
"@nuxt/test-utils": "3.19.2",
|
||||
"@nuxtjs/tailwindcss": "6.14.0",
|
||||
"@pinia/testing": "1.0.2",
|
||||
"@rollup/plugin-strip": "3.0.4",
|
||||
@@ -53,34 +53,34 @@
|
||||
"@testing-library/vue": "8.1.0",
|
||||
"@types/crypto-js": "4.2.2",
|
||||
"@types/eslint-config-prettier": "6.11.3",
|
||||
"@types/node": "22.15.32",
|
||||
"@types/node": "22.16.4",
|
||||
"@types/semver": "7.7.0",
|
||||
"@typescript-eslint/eslint-plugin": "8.34.1",
|
||||
"@typescript-eslint/eslint-plugin": "8.37.0",
|
||||
"@unraid/tailwind-rem-to-rem": "1.1.0",
|
||||
"@vitejs/plugin-vue": "5.2.4",
|
||||
"@vitejs/plugin-vue": "6.0.0",
|
||||
"@vitest/coverage-v8": "3.2.4",
|
||||
"@vue/apollo-util": "4.2.2",
|
||||
"@vue/test-utils": "2.4.6",
|
||||
"@vueuse/core": "13.4.0",
|
||||
"@vueuse/nuxt": "13.4.0",
|
||||
"eslint": "9.29.0",
|
||||
"@vueuse/core": "13.5.0",
|
||||
"@vueuse/nuxt": "13.5.0",
|
||||
"eslint": "9.31.0",
|
||||
"eslint-config-prettier": "10.1.5",
|
||||
"eslint-import-resolver-typescript": "4.4.4",
|
||||
"eslint-plugin-import": "2.31.0",
|
||||
"happy-dom": "18.0.0",
|
||||
"eslint-plugin-import": "2.32.0",
|
||||
"happy-dom": "18.0.1",
|
||||
"lodash-es": "4.17.21",
|
||||
"nuxt": "3.17.5",
|
||||
"nuxt": "3.17.7",
|
||||
"nuxt-custom-elements": "2.0.0-beta.32",
|
||||
"prettier": "3.5.3",
|
||||
"prettier-plugin-tailwindcss": "0.6.13",
|
||||
"prettier": "3.6.2",
|
||||
"prettier-plugin-tailwindcss": "0.6.14",
|
||||
"shadcn-nuxt": "2.2.0",
|
||||
"tailwindcss": "3.4.17",
|
||||
"tailwindcss-animate": "1.0.7",
|
||||
"terser": "5.43.1",
|
||||
"typescript": "5.8.3",
|
||||
"vite": "7.0.3",
|
||||
"vite": "7.0.4",
|
||||
"vite-plugin-remove-console": "2.2.0",
|
||||
"vite-plugin-vue-tracer": "0.1.4",
|
||||
"vite-plugin-vue-tracer": "1.0.0",
|
||||
"vitest": "3.2.4",
|
||||
"vue": "3.5.17",
|
||||
"vue-tsc": "3.0.1",
|
||||
@@ -88,9 +88,9 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@apollo/client": "3.13.8",
|
||||
"@floating-ui/dom": "1.7.1",
|
||||
"@floating-ui/utils": "0.2.9",
|
||||
"@floating-ui/vue": "1.1.6",
|
||||
"@floating-ui/dom": "1.7.2",
|
||||
"@floating-ui/utils": "0.2.10",
|
||||
"@floating-ui/vue": "1.1.7",
|
||||
"@headlessui/vue": "1.7.23",
|
||||
"@heroicons/vue": "2.2.0",
|
||||
"@jsonforms/core": "3.6.0",
|
||||
@@ -102,8 +102,8 @@
|
||||
"@unraid/shared-callbacks": "1.1.1",
|
||||
"@unraid/ui": "link:../unraid-ui",
|
||||
"@vue/apollo-composable": "4.2.2",
|
||||
"@vueuse/components": "13.4.0",
|
||||
"@vueuse/integrations": "13.4.0",
|
||||
"@vueuse/components": "13.5.0",
|
||||
"@vueuse/integrations": "13.5.0",
|
||||
"class-variance-authority": "0.7.1",
|
||||
"clsx": "2.1.1",
|
||||
"crypto-js": "4.2.0",
|
||||
@@ -111,26 +111,26 @@
|
||||
"focus-trap": "7.6.5",
|
||||
"graphql": "16.11.0",
|
||||
"graphql-tag": "2.12.6",
|
||||
"graphql-ws": "6.0.5",
|
||||
"graphql-ws": "6.0.6",
|
||||
"hex-to-rgba": "2.0.1",
|
||||
"highlight.js": "11.11.1",
|
||||
"isomorphic-dompurify": "2.25.0",
|
||||
"lucide-vue-next": "0.519.0",
|
||||
"isomorphic-dompurify": "2.26.0",
|
||||
"lucide-vue-next": "0.525.0",
|
||||
"marked": "16.0.0",
|
||||
"marked-base-url": "1.1.6",
|
||||
"marked-base-url": "1.1.7",
|
||||
"pinia": "3.0.3",
|
||||
"semver": "7.7.2",
|
||||
"tailwind-merge": "2.6.0",
|
||||
"vue-i18n": "11.1.6",
|
||||
"vue-i18n": "11.1.9",
|
||||
"vue-web-component-wrapper": "1.7.7",
|
||||
"vuetify": "3.8.10",
|
||||
"vuetify": "3.9.0",
|
||||
"wretch": "2.11.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@rollup/rollup-linux-x64-gnu": "4.44.0"
|
||||
"@rollup/rollup-linux-x64-gnu": "4.45.1"
|
||||
},
|
||||
"overrides": {
|
||||
"vue": "latest"
|
||||
},
|
||||
"packageManager": "pnpm@10.12.4"
|
||||
"packageManager": "pnpm@10.13.1"
|
||||
}
|
||||
|
||||
40
web/scripts/check-node-version.js
Normal file
40
web/scripts/check-node-version.js
Normal file
@@ -0,0 +1,40 @@
|
||||
#!/usr/bin/env node

/**
 * Verifies that the Node.js runtime executing this script satisfies the
 * version pinned in the repository-level .nvmrc file. Intended to be run
 * from an npm/pnpm lifecycle script; exits with code 1 (and a colored
 * error message) when the running Node.js is older than required.
 */

const fs = require('fs');
const path = require('path');

/**
 * Reads the required Node.js version from the repo root .nvmrc
 * (two directories above this script, i.e. web/scripts -> repo root).
 * Tolerates an optional leading "v" (nvm convention, e.g. "v22.17.0");
 * the original Number() parse turned "v22" into NaN, which made every
 * comparison false and silently disabled the check.
 * @returns {string} version string without the "v" prefix, e.g. "22.17.0"
 */
function getRequiredVersion() {
  const nvmrcPath = path.join(__dirname, '..', '..', '.nvmrc');
  return fs.readFileSync(nvmrcPath, 'utf8').trim().replace(/^v/, '');
}

/**
 * Returns the version of the Node.js runtime actually executing this
 * script. Using process.version instead of `execSync('node -v')` avoids
 * a subprocess and, more importantly, reports the interpreter running
 * the build rather than whichever `node` happens to be first on PATH.
 * @returns {string} version string without the "v" prefix
 */
function getCurrentVersion() {
  return process.version.replace(/^v/, '');
}

/**
 * Parses "MAJOR[.MINOR[.PATCH]]" into a numeric triple. Missing or
 * malformed components default to 0, so a partial pin like "22" or
 * "22.17" in .nvmrc still compares correctly instead of producing NaN.
 * @param {string} version
 * @returns {[number, number, number]} [major, minor, patch]
 */
function parseVersion(version) {
  const parts = version.split('.').map((part) => Number.parseInt(part, 10));
  return [parts[0] || 0, parts[1] || 0, parts[2] || 0];
}

/**
 * Lexicographically compares two parsed version triples.
 * @param {[number, number, number]} a
 * @param {[number, number, number]} b
 * @returns {number} negative if a < b, zero if equal, positive if a > b
 */
function compareVersions(a, b) {
  for (let i = 0; i < 3; i += 1) {
    if (a[i] !== b[i]) {
      return a[i] - b[i];
    }
  }
  return 0;
}

/**
 * Entry point: compares the running Node.js version against .nvmrc and
 * exits the process with code 1 on failure (missing/unreadable .nvmrc
 * also fails, so CI cannot silently skip the check).
 */
function checkNodeVersion() {
  try {
    const requiredVersion = getRequiredVersion();
    const currentVersion = getCurrentVersion();

    if (compareVersions(parseVersion(currentVersion), parseVersion(requiredVersion)) < 0) {
      console.error(`\x1b[31mError: Node.js version ${requiredVersion} or higher is required.\x1b[0m`);
      console.error(`\x1b[31mCurrent version: ${currentVersion}\x1b[0m`);
      console.error(`\x1b[33mPlease install Node.js ${requiredVersion} or use nvm to switch versions.\x1b[0m`);
      process.exit(1);
    }

    console.log(`\x1b[32m✓ Node.js version ${currentVersion} meets requirement (>= ${requiredVersion})\x1b[0m`);
  } catch (error) {
    console.error('Error checking Node.js version:', error.message);
    process.exit(1);
  }
}

checkNodeVersion();
|
||||
@@ -61,6 +61,59 @@ export const useAccountStore = defineStore('account', () => {
|
||||
accountActionStatus.value = 'waiting';
|
||||
}
|
||||
};
|
||||
|
||||
// Initialize mutations during store setup to maintain Apollo context
|
||||
const { mutate: signOutMutation, onDone: onSignOutDone, onError: onSignOutError } = useMutation(CONNECT_SIGN_OUT);
|
||||
const { mutate: signInMutation, onDone: onSignInDone, onError: onSignInError } = useMutation(CONNECT_SIGN_IN);
|
||||
|
||||
// Handle sign out mutation results
|
||||
onSignOutDone((res) => {
|
||||
console.debug('[connectSignOutMutation]', res);
|
||||
accountActionStatus.value = 'success';
|
||||
setQueueConnectSignOut(false); // reset
|
||||
});
|
||||
|
||||
onSignOutError((error) => {
|
||||
logErrorMessages(error);
|
||||
accountActionStatus.value = 'failed';
|
||||
errorsStore.setError({
|
||||
heading: 'Failed to update Connect account configuration',
|
||||
message: error.message,
|
||||
level: 'error',
|
||||
ref: 'connectSignOutMutation',
|
||||
type: 'account',
|
||||
});
|
||||
});
|
||||
|
||||
// Handle sign in mutation results
|
||||
onSignInDone((res) => {
|
||||
if (res.data?.connectSignIn) {
|
||||
accountActionStatus.value = 'success';
|
||||
setConnectSignInPayload(undefined); // reset
|
||||
return;
|
||||
}
|
||||
accountActionStatus.value = 'failed';
|
||||
errorsStore.setError({
|
||||
heading: 'unraid-api failed to update Connect account configuration',
|
||||
message: 'Sign In mutation unsuccessful',
|
||||
level: 'error',
|
||||
ref: 'connectSignInMutation',
|
||||
type: 'account',
|
||||
});
|
||||
});
|
||||
|
||||
onSignInError((error) => {
|
||||
logErrorMessages(error);
|
||||
accountActionStatus.value = 'failed';
|
||||
errorsStore.setError({
|
||||
heading: 'unraid-api failed to update Connect account configuration',
|
||||
message: error.message,
|
||||
level: 'error',
|
||||
ref: 'connectSignInMutation',
|
||||
type: 'account',
|
||||
});
|
||||
});
|
||||
|
||||
watchEffect(() => {
|
||||
if (unraidApiClient.value && connectSignInPayload.value) {
|
||||
// connectSignInMutation();
|
||||
@@ -258,7 +311,7 @@ export const useAccountStore = defineStore('account', () => {
|
||||
);
|
||||
};
|
||||
|
||||
const connectSignInMutation = async () => {
|
||||
const connectSignInMutation = () => {
|
||||
if (
|
||||
!connectSignInPayload.value ||
|
||||
(connectSignInPayload.value &&
|
||||
@@ -271,83 +324,21 @@ export const useAccountStore = defineStore('account', () => {
|
||||
}
|
||||
|
||||
accountActionStatus.value = 'updating';
|
||||
const {
|
||||
mutate: signInMutation,
|
||||
onDone,
|
||||
onError,
|
||||
} = await useMutation(CONNECT_SIGN_IN, {
|
||||
variables: {
|
||||
input: {
|
||||
apiKey: connectSignInPayload.value.apiKey,
|
||||
userInfo: {
|
||||
email: connectSignInPayload.value.email,
|
||||
preferred_username: connectSignInPayload.value.preferred_username,
|
||||
},
|
||||
|
||||
return signInMutation({
|
||||
input: {
|
||||
apiKey: connectSignInPayload.value.apiKey,
|
||||
userInfo: {
|
||||
email: connectSignInPayload.value.email,
|
||||
preferred_username: connectSignInPayload.value.preferred_username,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
signInMutation();
|
||||
|
||||
onDone((res) => {
|
||||
if (res.data?.connectSignIn) {
|
||||
accountActionStatus.value = 'success';
|
||||
setConnectSignInPayload(undefined); // reset
|
||||
return;
|
||||
}
|
||||
accountActionStatus.value = 'failed';
|
||||
errorsStore.setError({
|
||||
heading: 'unraid-api failed to update Connect account configuration',
|
||||
message: 'Sign In mutation unsuccessful',
|
||||
level: 'error',
|
||||
ref: 'connectSignInMutation',
|
||||
type: 'account',
|
||||
});
|
||||
});
|
||||
|
||||
onError((error) => {
|
||||
logErrorMessages(error);
|
||||
accountActionStatus.value = 'failed';
|
||||
errorsStore.setError({
|
||||
heading: 'unraid-api failed to update Connect account configuration',
|
||||
message: error.message,
|
||||
level: 'error',
|
||||
ref: 'connectSignInMutation',
|
||||
type: 'account',
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
const connectSignOutMutation = async () => {
|
||||
const connectSignOutMutation = () => {
|
||||
accountActionStatus.value = 'updating';
|
||||
// @todo is this still needed here with the change to a mutation?
|
||||
// if (!serverStore.registered && accountAction.value && !accountAction.value?.user) {
|
||||
// accountActionHide.value = true;
|
||||
// accountActionStatus.value = 'success';
|
||||
// return;
|
||||
// }
|
||||
|
||||
const { mutate: signOutMutation, onDone, onError } = await useMutation(CONNECT_SIGN_OUT);
|
||||
|
||||
signOutMutation();
|
||||
|
||||
onDone((res) => {
|
||||
console.debug('[connectSignOutMutation]', res);
|
||||
accountActionStatus.value = 'success';
|
||||
setQueueConnectSignOut(false); // reset
|
||||
});
|
||||
|
||||
onError((error) => {
|
||||
logErrorMessages(error);
|
||||
accountActionStatus.value = 'failed';
|
||||
errorsStore.setError({
|
||||
heading: 'Failed to update Connect account configuration',
|
||||
message: error.message,
|
||||
level: 'error',
|
||||
ref: 'connectSignOutMutation',
|
||||
type: 'account',
|
||||
});
|
||||
});
|
||||
return signOutMutation();
|
||||
};
|
||||
|
||||
const setAccountAction = (action: ExternalSignIn | ExternalSignOut) => {
|
||||
|
||||
@@ -495,6 +495,7 @@ export const useServerStore = defineStore('server', () => {
|
||||
});
|
||||
|
||||
let messageEGUID = '';
|
||||
let trialMessage = '';
|
||||
const stateData = computed((): ServerStateData => {
|
||||
switch (state.value) {
|
||||
case 'ENOKEYFILE':
|
||||
@@ -510,16 +511,26 @@ export const useServerStore = defineStore('server', () => {
|
||||
'<p>Choose an option below, then use our <a href="https://unraid.net/getting-started" target="_blank" rel="noreffer noopener">Getting Started Guide</a> to configure your array in less than 15 minutes.</p>',
|
||||
};
|
||||
case 'TRIAL':
|
||||
if (trialExtensionEligibleInsideRenewalWindow.value) {
|
||||
trialMessage = '<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>Your trial is expiring soon. When it expires, <strong>the array will stop</strong>. You may extend your trial now, purchase a license key, or wait until expiration to take action.</p>';
|
||||
} else if (trialExtensionIneligibleInsideRenewalWindow.value) {
|
||||
trialMessage = '<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>Your trial is expiring soon and you have used all available extensions. When it expires, <strong>the array will stop</strong>. To continue using Unraid OS, you must purchase a license key.</p>';
|
||||
} else if (trialExtensionEligibleOutsideRenewalWindow.value) {
|
||||
trialMessage = '<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>When your <em>Trial</em> expires, <strong>the array will stop</strong>. At that point you may either purchase a license key or request a <em>Trial</em> extension.</p>';
|
||||
} else { // would be trialExtensionIneligibleOutsideRenewalWindow if it wasn't an else conditionally
|
||||
trialMessage = '<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>You have used all available trial extensions. When your <em>Trial</em> expires, <strong>the array will stop</strong>. To continue using Unraid OS after expiration, you must purchase a license key.</p>';
|
||||
}
|
||||
|
||||
return {
|
||||
actions: [
|
||||
...(!registered.value && connectPluginInstalled.value ? [signInAction.value] : []),
|
||||
...[purchaseAction.value, redeemAction.value],
|
||||
...(trialExtensionEligibleInsideRenewalWindow.value ? [trialExtendAction.value] : []),
|
||||
...(registered.value && connectPluginInstalled.value ? [signOutAction.value] : []),
|
||||
],
|
||||
humanReadable: 'Trial',
|
||||
heading: 'Thank you for choosing Unraid OS!',
|
||||
message:
|
||||
'<p>Your <em>Trial</em> key includes all the functionality and device support of an <em>Unleashed</em> key.</p><p>After your <em>Trial</em> has reached expiration, your server <strong>still functions normally</strong> until the next time you Stop the array or reboot your server.</p><p>At that point you may either purchase a license key or request a <em>Trial</em> extension.</p>',
|
||||
message: trialMessage,
|
||||
};
|
||||
case 'EEXPIRED':
|
||||
return {
|
||||
@@ -773,6 +784,18 @@ export const useServerStore = defineStore('server', () => {
|
||||
return stateData.value.actions.filter((action) => !authActionsNames.includes(action.name));
|
||||
});
|
||||
const trialExtensionEligible = computed(() => !regGen.value || regGen.value < 2);
|
||||
const trialWithin5DaysOfExpiration = computed(() => {
|
||||
if (!expireTime.value || state.value !== 'TRIAL') {
|
||||
return false;
|
||||
}
|
||||
const today = dayjs();
|
||||
const expirationDate = dayjs(expireTime.value);
|
||||
const daysUntilExpiration = expirationDate.diff(today, 'day');
|
||||
return daysUntilExpiration <= 5 && daysUntilExpiration >= 0;
|
||||
});
|
||||
const trialExtensionEligibleInsideRenewalWindow = computed(() => trialExtensionEligible.value && trialWithin5DaysOfExpiration.value);
|
||||
const trialExtensionEligibleOutsideRenewalWindow = computed(() => trialExtensionEligible.value && !trialWithin5DaysOfExpiration.value);
|
||||
const trialExtensionIneligibleInsideRenewalWindow = computed(() => !trialExtensionEligible.value && trialWithin5DaysOfExpiration.value);
|
||||
|
||||
const serverConfigError = computed((): Error | undefined => {
|
||||
if (!config.value?.valid && config.value?.error) {
|
||||
@@ -1175,7 +1198,9 @@ export const useServerStore = defineStore('server', () => {
|
||||
|
||||
setTimeout(() => {
|
||||
load();
|
||||
loadCloudState();
|
||||
if (connectPluginInstalled.value) {
|
||||
loadCloudState();
|
||||
}
|
||||
}, 500);
|
||||
|
||||
onResult((result) => {
|
||||
|
||||
Reference in New Issue
Block a user