Compare commits

...

81 Commits

Author SHA1 Message Date
Eli Bosley
b663293f1b chore: update lint-staged and settings.local.json commands
- Change lint-staged command from pnpm to npx pnpm for consistency
- Add nvm and git config commands to settings.local.json for better tooling support
2025-07-12 09:42:07 -04:00
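As a rough illustration of the lint-staged change this commit describes, here is a minimal sketch; the globs, script names, and the use of a standalone config file are hypothetical (the repo may keep this block in `package.json` instead):

```js
// lint-staged.config.mjs — hypothetical sketch, not the repo's actual config.
// Invokes pnpm through npx, matching the "pnpm" -> "npx pnpm" change above.
export default {
  // Hypothetical globs and script names; lint-staged appends the staged filenames.
  '*.{ts,vue}': 'npx pnpm lint:fix',
  '*.{json,md}': 'npx pnpm format',
};
```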
Eli Bosley
4c0b967164 refactor(test): update vitest config and improve rclone service tests
- Replace vitest.workspace.js with vitest.config.ts for better project configuration
- Add vitest dependency and update test script in package.json
- Improve RCloneApiService tests with better mocking and error handling
- Update test snapshots to include backupBase path
2025-07-12 09:41:39 -04:00
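A minimal sketch of the kind of `vitest.config.ts` that replaces a `vitest.workspace.js`; the project globs are hypothetical, and `test.projects` assumes Vitest 3.2+ (the nuxt/test-utils release notes further down reference the same "projects" switch):

```ts
// vitest.config.ts — hypothetical sketch of a projects-based setup.
// Vitest 3.2+ reads test.projects from the root config, replacing the
// separate vitest.workspace.js file this commit removes.
import { defineConfig } from 'vitest/config';

export default defineConfig({
  test: {
    // Hypothetical project globs; each match runs as its own Vitest project.
    projects: ['packages/*'],
  },
});
```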
Eli Bosley
0d864fa948 chore: frontend functionality 2025-07-11 21:42:03 -04:00
Eli Bosley
984b8748ef chore: type cleanup 2025-07-11 21:31:01 -04:00
Eli Bosley
a406fdc5fe chore: types 2025-07-11 20:39:40 -04:00
Eli Bosley
e7066c0e09 chore: update pnpm lock 2025-07-11 15:53:38 -04:00
Eli Bosley
872559ce56 chore: build.ts removed 2025-07-11 15:43:52 -04:00
Eli Bosley
f99264e73d chore: remove .bivvy 2025-07-11 15:41:41 -04:00
Eli Bosley
73ba3f074a Merge branch 'main' into feat/flash-backup-implementation 2025-07-11 15:38:01 -04:00
renovate[bot]
e1a7a3d22d chore(deps): update dependency node to v22 (#1507)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
| [node](https://redirect.github.com/actions/node-versions) | uses-with | major | `20` -> `22` |

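The `uses-with` update type above means the bump lands in a workflow step's `with:` input rather than in a package manifest; a hypothetical sketch of the shape Renovate rewrites (workflow path and step are invented):

```yaml
# .github/workflows/ci.yml — hypothetical illustration of a "uses-with" bump.
steps:
  - uses: actions/setup-node@v4
    with:
      node-version: 22 # Renovate bumped this input from 20 to 22
```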
---

### Release Notes

<details>
<summary>actions/node-versions (node)</summary>

### [`v22.17.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.17.0-15866718879): 22.17.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.16.0-15177438473...22.17.0-15866718879)

Node.js 22.17.0

### [`v22.16.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.16.0-15177438473): 22.16.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.15.1-15035854612...22.16.0-15177438473)

Node.js 22.16.0

### [`v22.15.1`](https://redirect.github.com/actions/node-versions/releases/tag/22.15.1-15035854612): 22.15.1

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.15.0-14621731016...22.15.1-15035854612)

Node.js 22.15.1

### [`v22.15.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.15.0-14621731016): 22.15.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.14.0-13265982013...22.15.0-14621731016)

Node.js 22.15.0

### [`v22.14.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.14.0-13265982013): 22.14.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.13.1-12900459766...22.14.0-13265982013)

Node.js 22.14.0

### [`v22.13.1`](https://redirect.github.com/actions/node-versions/releases/tag/22.13.1-12900459766): 22.13.1

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.13.0-12671059536...22.13.1-12900459766)

Node.js 22.13.1

### [`v22.13.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.13.0-12671059536): 22.13.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.12.0-12152383658...22.13.0-12671059536)

Node.js 22.13.0

### [`v22.12.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.12.0-12152383658): 22.12.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.11.0-11593095476...22.12.0-12152383658)

Node.js 22.12.0

### [`v22.11.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.11.0-11593095476): 22.11.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.10.0-11377615849...22.11.0-11593095476)

Node.js 22.11.0

### [`v22.10.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.10.0-11377615849): 22.10.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.9.0-10914884886...22.10.0-11377615849)

Node.js 22.10.0

### [`v22.9.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.9.0-10914884886): 22.9.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.8.0-10685632420...22.9.0-10914884886)

Node.js 22.9.0

### [`v22.8.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.8.0-10685632420): 22.8.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.7.0-10511334152...22.8.0-10685632420)

Node.js 22.8.0

### [`v22.7.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.7.0-10511334152): 22.7.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.6.0-10277432289...22.7.0-10511334152)

Node.js 22.7.0

### [`v22.6.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.6.0-10277432289): 22.6.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.5.1-10010673511...22.6.0-10277432289)

Node.js 22.6.0

### [`v22.5.1`](https://redirect.github.com/actions/node-versions/releases/tag/22.5.1-10010673511): 22.5.1

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.5.0-9985144103...22.5.1-10010673511)

Node.js 22.5.1

### [`v22.5.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.5.0-9985144103): 22.5.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.4.1-9860948056...22.5.0-9985144103)

Node.js 22.5.0

### [`v22.4.1`](https://redirect.github.com/actions/node-versions/releases/tag/22.4.1-9860948056): 22.4.1

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.4.0-9766506602...22.4.1-9860948056)

Node.js 22.4.1

### [`v22.4.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.4.0-9766506602): 22.4.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.3.0-9569309553...22.4.0-9766506602)

Node.js 22.4.0

### [`v22.3.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.3.0-9569309553): 22.3.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.2.0-9105861751...22.3.0-9569309553)

Node.js 22.3.0

### [`v22.2.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.2.0-9105861751): 22.2.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.1.0-8926142033...22.2.0-9105861751)

Node.js 22.2.0

### [`v22.1.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.1.0-8926142033): 22.1.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/22.0.0-8879734543...22.1.0-8926142033)

Node.js 22.1.0

### [`v22.0.0`](https://redirect.github.com/actions/node-versions/releases/tag/22.0.0-8879734543): 22.0.0

[Compare Source](https://redirect.github.com/actions/node-versions/compare/20.19.3-15828158811...22.0.0-8879734543)

Node.js 22.0.0

</details>

---

### Configuration

📅 **Schedule**: Branch creation - At any time (no schedule defined),
Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check this box

---

This PR was generated by [Mend Renovate](https://mend.io/renovate/).
View the [repository job
log](https://developer.mend.io/github/unraid/api).


Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-11 14:41:02 -04:00
renovate[bot]
53b05ebe5e fix(deps): update all non-major dependencies (#1489)
This PR contains the following updates:

| Package | Change | Age | Confidence | Type | Update |
|---|---|---|---|---|---|
| [@eslint/js](https://eslint.org) ([source](https://redirect.github.com/eslint/eslint/tree/HEAD/packages/js)) | [`9.29.0` -> `9.30.1`](https://renovatebot.com/diffs/npm/@eslint%2fjs/9.29.0/9.30.1) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@eslint%2fjs/9.30.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@eslint%2fjs/9.29.0/9.30.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [@faker-js/faker](https://fakerjs.dev) ([source](https://redirect.github.com/faker-js/faker)) | [`9.8.0` -> `9.9.0`](https://renovatebot.com/diffs/npm/@faker-js%2ffaker/9.8.0/9.9.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@faker-js%2ffaker/9.9.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@faker-js%2ffaker/9.8.0/9.9.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [@floating-ui/dom](https://floating-ui.com) ([source](https://redirect.github.com/floating-ui/floating-ui/tree/HEAD/packages/dom)) | [`1.7.1` -> `1.7.2`](https://renovatebot.com/diffs/npm/@floating-ui%2fdom/1.7.1/1.7.2) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@floating-ui%2fdom/1.7.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@floating-ui%2fdom/1.7.1/1.7.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | patch |
| [@floating-ui/utils](https://floating-ui.com) ([source](https://redirect.github.com/floating-ui/floating-ui/tree/HEAD/packages/utils)) | [`0.2.9` -> `0.2.10`](https://renovatebot.com/diffs/npm/@floating-ui%2futils/0.2.9/0.2.10) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@floating-ui%2futils/0.2.10?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@floating-ui%2futils/0.2.9/0.2.10?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | patch |
| [@floating-ui/vue](https://floating-ui.com/docs/vue) ([source](https://redirect.github.com/floating-ui/floating-ui/tree/HEAD/packages/vue)) | [`1.1.6` -> `1.1.7`](https://renovatebot.com/diffs/npm/@floating-ui%2fvue/1.1.6/1.1.7) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@floating-ui%2fvue/1.1.7?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@floating-ui%2fvue/1.1.6/1.1.7?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | patch |
| [@graphql-codegen/client-preset](https://redirect.github.com/dotansimha/graphql-code-generator) ([source](https://redirect.github.com/dotansimha/graphql-code-generator/tree/HEAD/packages/presets/client)) | [`4.8.2` -> `4.8.3`](https://renovatebot.com/diffs/npm/@graphql-codegen%2fclient-preset/4.8.2/4.8.3) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@graphql-codegen%2fclient-preset/4.8.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@graphql-codegen%2fclient-preset/4.8.2/4.8.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | patch |
| [@graphql-codegen/client-preset](https://redirect.github.com/dotansimha/graphql-code-generator) ([source](https://redirect.github.com/dotansimha/graphql-code-generator/tree/HEAD/packages/presets/client)) | [`4.8.2` -> `4.8.3`](https://renovatebot.com/diffs/npm/@graphql-codegen%2fclient-preset/4.8.2/4.8.3) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@graphql-codegen%2fclient-preset/4.8.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@graphql-codegen%2fclient-preset/4.8.2/4.8.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | patch |
| [@graphql-codegen/typed-document-node](https://redirect.github.com/dotansimha/graphql-code-generator) ([source](https://redirect.github.com/dotansimha/graphql-code-generator/tree/HEAD/packages/plugins/typescript/typed-document-node)) | [`5.1.1` -> `5.1.2`](https://renovatebot.com/diffs/npm/@graphql-codegen%2ftyped-document-node/5.1.1/5.1.2) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@graphql-codegen%2ftyped-document-node/5.1.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@graphql-codegen%2ftyped-document-node/5.1.1/5.1.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | patch |
| [@ianvs/prettier-plugin-sort-imports](https://redirect.github.com/ianvs/prettier-plugin-sort-imports) | [`4.4.2` -> `4.5.1`](https://renovatebot.com/diffs/npm/@ianvs%2fprettier-plugin-sort-imports/4.4.2/4.5.1) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@ianvs%2fprettier-plugin-sort-imports/4.5.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@ianvs%2fprettier-plugin-sort-imports/4.4.2/4.5.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [@nuxt/devtools](https://devtools.nuxt.com) ([source](https://redirect.github.com/nuxt/devtools/tree/HEAD/packages/devtools)) | [`2.5.0` -> `2.6.2`](https://renovatebot.com/diffs/npm/@nuxt%2fdevtools/2.5.0/2.6.2) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@nuxt%2fdevtools/2.6.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@nuxt%2fdevtools/2.5.0/2.6.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [@nuxt/eslint](https://redirect.github.com/nuxt/eslint) ([source](https://redirect.github.com/nuxt/eslint/tree/HEAD/packages/module)) | [`1.4.1` -> `1.5.2`](https://renovatebot.com/diffs/npm/@nuxt%2feslint/1.4.1/1.5.2) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@nuxt%2feslint/1.5.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@nuxt%2feslint/1.4.1/1.5.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [@nuxt/test-utils](https://redirect.github.com/nuxt/test-utils) | [`3.19.1` -> `3.19.2`](https://renovatebot.com/diffs/npm/@nuxt%2ftest-utils/3.19.1/3.19.2) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@nuxt%2ftest-utils/3.19.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@nuxt%2ftest-utils/3.19.1/3.19.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | patch |
| [@rollup/rollup-linux-x64-gnu](https://rollupjs.org/) ([source](https://redirect.github.com/rollup/rollup)) | [`4.44.0` -> `4.44.2`](https://renovatebot.com/diffs/npm/@rollup%2frollup-linux-x64-gnu/4.44.0/4.44.2) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@rollup%2frollup-linux-x64-gnu/4.44.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@rollup%2frollup-linux-x64-gnu/4.44.0/4.44.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | optionalDependencies | patch |
| [@swc/core](https://swc.rs) ([source](https://redirect.github.com/swc-project/swc)) | [`1.12.4` -> `1.12.11`](https://renovatebot.com/diffs/npm/@swc%2fcore/1.12.4/1.12.11) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@swc%2fcore/1.12.11?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@swc%2fcore/1.12.4/1.12.11?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | patch |
| [@types/bun](https://redirect.github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/bun) ([source](https://redirect.github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/bun)) | [`1.2.16` -> `1.2.18`](https://renovatebot.com/diffs/npm/@types%2fbun/1.2.16/1.2.18) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@types%2fbun/1.2.18?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@types%2fbun/1.2.16/1.2.18?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | patch |
| [@types/dockerode](https://redirect.github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/dockerode) ([source](https://redirect.github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/dockerode)) | [`3.3.41` -> `3.3.42`](https://renovatebot.com/diffs/npm/@types%2fdockerode/3.3.41/3.3.42) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@types%2fdockerode/3.3.42?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@types%2fdockerode/3.3.41/3.3.42?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | patch |
| [@types/lodash](https://redirect.github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/lodash) ([source](https://redirect.github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/lodash)) | [`4.17.18` -> `4.17.20`](https://renovatebot.com/diffs/npm/@types%2flodash/4.17.18/4.17.20) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@types%2flodash/4.17.20?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@types%2flodash/4.17.18/4.17.20?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | patch |
| [@types/node](https://redirect.github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node) ([source](https://redirect.github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/node)) | [`22.15.32` -> `22.16.3`](https://renovatebot.com/diffs/npm/@types%2fnode/22.15.32/22.16.3) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@types%2fnode/22.16.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@types%2fnode/22.15.32/22.16.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [@typescript-eslint/eslint-plugin](https://typescript-eslint.io/packages/eslint-plugin) ([source](https://redirect.github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/eslint-plugin)) | [`8.34.1` -> `8.36.0`](https://renovatebot.com/diffs/npm/@typescript-eslint%2feslint-plugin/8.34.1/8.36.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@typescript-eslint%2feslint-plugin/8.36.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@typescript-eslint%2feslint-plugin/8.34.1/8.36.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [@vueuse/components](https://redirect.github.com/vueuse/vueuse/tree/main/packages/components#readme) ([source](https://redirect.github.com/vueuse/vueuse/tree/HEAD/packages/components)) | [`13.4.0` -> `13.5.0`](https://renovatebot.com/diffs/npm/@vueuse%2fcomponents/13.4.0/13.5.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@vueuse%2fcomponents/13.5.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@vueuse%2fcomponents/13.4.0/13.5.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | minor |
| [@vueuse/core](https://redirect.github.com/vueuse/vueuse) ([source](https://redirect.github.com/vueuse/vueuse/tree/HEAD/packages/core)) | [`13.4.0` -> `13.5.0`](https://renovatebot.com/diffs/npm/@vueuse%2fcore/13.4.0/13.5.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@vueuse%2fcore/13.5.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@vueuse%2fcore/13.4.0/13.5.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [@vueuse/core](https://redirect.github.com/vueuse/vueuse) ([source](https://redirect.github.com/vueuse/vueuse/tree/HEAD/packages/core)) | [`13.4.0` -> `13.5.0`](https://renovatebot.com/diffs/npm/@vueuse%2fcore/13.4.0/13.5.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@vueuse%2fcore/13.5.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@vueuse%2fcore/13.4.0/13.5.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | minor |
| [@vueuse/integrations](https://redirect.github.com/vueuse/vueuse/tree/main/packages/integrations#readme) ([source](https://redirect.github.com/vueuse/vueuse/tree/HEAD/packages/integrations)) | [`13.4.0` -> `13.5.0`](https://renovatebot.com/diffs/npm/@vueuse%2fintegrations/13.4.0/13.5.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@vueuse%2fintegrations/13.5.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@vueuse%2fintegrations/13.4.0/13.5.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | minor |
| [@vueuse/nuxt](https://redirect.github.com/vueuse/vueuse/tree/main/packages/nuxt#readme) ([source](https://redirect.github.com/vueuse/vueuse/tree/HEAD/packages/nuxt)) | [`13.4.0` -> `13.5.0`](https://renovatebot.com/diffs/npm/@vueuse%2fnuxt/13.4.0/13.5.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/@vueuse%2fnuxt/13.5.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/@vueuse%2fnuxt/13.4.0/13.5.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [awalsh128/cache-apt-pkgs-action](https://redirect.github.com/awalsh128/cache-apt-pkgs-action) | `v1.4.3` -> `v1.5.1` | [![age](https://developer.mend.io/api/mc/badges/age/github-tags/awalsh128%2fcache-apt-pkgs-action/v1.5.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/github-tags/awalsh128%2fcache-apt-pkgs-action/v1.4.3/v1.5.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | action | minor |
| [cache-manager](https://redirect.github.com/jaredwray/cacheable) ([source](https://redirect.github.com/jaredwray/cacheable/tree/HEAD/packages/cache-manager)) | [`7.0.0` -> `7.0.1`](https://renovatebot.com/diffs/npm/cache-manager/7.0.0/7.0.1) | [![age](https://developer.mend.io/api/mc/badges/age/npm/cache-manager/7.0.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/cache-manager/7.0.0/7.0.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | patch |
| [commit-and-tag-version](https://redirect.github.com/absolute-version/commit-and-tag-version) | [`9.5.0` -> `9.6.0`](https://renovatebot.com/diffs/npm/commit-and-tag-version/9.5.0/9.6.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/commit-and-tag-version/9.6.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/commit-and-tag-version/9.5.0/9.6.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [concurrently](https://redirect.github.com/open-cli-tools/concurrently) | [`9.1.2` -> `9.2.0`](https://renovatebot.com/diffs/npm/concurrently/9.1.2/9.2.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/concurrently/9.2.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/concurrently/9.1.2/9.2.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [dotenv](https://redirect.github.com/motdotla/dotenv) | [`17.1.0` -> `17.2.0`](https://renovatebot.com/diffs/npm/dotenv/17.1.0/17.2.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/dotenv/17.2.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/dotenv/17.1.0/17.2.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | minor |
| [eslint](https://eslint.org) ([source](https://redirect.github.com/eslint/eslint)) | [`9.29.0` -> `9.30.1`](https://renovatebot.com/diffs/npm/eslint/9.29.0/9.30.1) | [![age](https://developer.mend.io/api/mc/badges/age/npm/eslint/9.30.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/eslint/9.29.0/9.30.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [eslint-plugin-import](https://redirect.github.com/import-js/eslint-plugin-import) | [`2.31.0` -> `2.32.0`](https://renovatebot.com/diffs/npm/eslint-plugin-import/2.31.0/2.32.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/eslint-plugin-import/2.32.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/eslint-plugin-import/2.31.0/2.32.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [eslint-plugin-n](https://redirect.github.com/eslint-community/eslint-plugin-n) | [`17.20.0` -> `17.21.0`](https://renovatebot.com/diffs/npm/eslint-plugin-n/17.20.0/17.21.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/eslint-plugin-n/17.21.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/eslint-plugin-n/17.20.0/17.21.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [eslint-plugin-prettier](https://redirect.github.com/prettier/eslint-plugin-prettier) | [`5.5.0` -> `5.5.1`](https://renovatebot.com/diffs/npm/eslint-plugin-prettier/5.5.0/5.5.1) | [![age](https://developer.mend.io/api/mc/badges/age/npm/eslint-plugin-prettier/5.5.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/eslint-plugin-prettier/5.5.0/5.5.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | patch |
| [eslint-plugin-vue](https://eslint.vuejs.org) ([source](https://redirect.github.com/vuejs/eslint-plugin-vue)) | [`10.2.0` -> `10.3.0`](https://renovatebot.com/diffs/npm/eslint-plugin-vue/10.2.0/10.3.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/eslint-plugin-vue/10.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/eslint-plugin-vue/10.2.0/10.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [fast-check](https://fast-check.dev/) ([source](https://redirect.github.com/dubzzz/fast-check/tree/HEAD/packages/fast-check)) | [`4.1.1` -> `4.2.0`](https://renovatebot.com/diffs/npm/fast-check/4.1.1/4.2.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/fast-check/4.2.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/fast-check/4.1.1/4.2.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [glob](https://redirect.github.com/isaacs/node-glob) | [`11.0.1` -> `11.0.3`](https://renovatebot.com/diffs/npm/glob/11.0.1/11.0.3) | [![age](https://developer.mend.io/api/mc/badges/age/npm/glob/11.0.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/glob/11.0.1/11.0.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | patch |
| [happy-dom](https://redirect.github.com/capricorn86/happy-dom) | [`18.0.0` -> `18.0.1`](https://renovatebot.com/diffs/npm/happy-dom/18.0.0/18.0.1) | [![age](https://developer.mend.io/api/mc/badges/age/npm/happy-dom/18.0.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/happy-dom/18.0.0/18.0.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | patch |
| [inquirer](https://redirect.github.com/SBoudrias/Inquirer.js/blob/main/packages/inquirer/README.md) ([source](https://redirect.github.com/SBoudrias/Inquirer.js)) | [`12.6.3` -> `12.7.0`](https://renovatebot.com/diffs/npm/inquirer/12.6.3/12.7.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/inquirer/12.7.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/inquirer/12.6.3/12.7.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | minor |
| [isomorphic-dompurify](https://redirect.github.com/kkomelin/isomorphic-dompurify) | [`2.25.0` -> `2.26.0`](https://renovatebot.com/diffs/npm/isomorphic-dompurify/2.25.0/2.26.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/isomorphic-dompurify/2.26.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/isomorphic-dompurify/2.25.0/2.26.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | minor |
| [lucide-vue-next](https://lucide.dev) ([source](https://redirect.github.com/lucide-icons/lucide/tree/HEAD/packages/lucide-vue-next)) | [`0.519.0` -> `0.525.0`](https://renovatebot.com/diffs/npm/lucide-vue-next/0.519.0/0.525.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/lucide-vue-next/0.525.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/lucide-vue-next/0.519.0/0.525.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | minor |
| [marked-base-url](https://redirect.github.com/markedjs/marked-base-url) | [`1.1.6` -> `1.1.7`](https://renovatebot.com/diffs/npm/marked-base-url/1.1.6/1.1.7) | [![age](https://developer.mend.io/api/mc/badges/age/npm/marked-base-url/1.1.7?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/marked-base-url/1.1.6/1.1.7?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | patch |
| [node](https://nodejs.org) ([source](https://redirect.github.com/nodejs/node)) | `22.16.0` -> `22.17.0` | [![age](https://developer.mend.io/api/mc/badges/age/node-version/node/v22.17.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/node-version/node/v22.16.0/v22.17.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | minor |
| [nuxt](https://nuxt.com) ([source](https://redirect.github.com/nuxt/nuxt/tree/HEAD/packages/nuxt)) | [`3.17.5` -> `3.17.6`](https://renovatebot.com/diffs/npm/nuxt/3.17.5/3.17.6) | [![age](https://developer.mend.io/api/mc/badges/age/npm/nuxt/3.17.6?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/nuxt/3.17.5/3.17.6?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | patch |
| [pnpm](https://pnpm.io) ([source](https://redirect.github.com/pnpm/pnpm/tree/HEAD/pnpm)) | [`10.12.4` -> `10.13.1`](https://renovatebot.com/diffs/npm/pnpm/10.12.4/10.13.1) | [![age](https://developer.mend.io/api/mc/badges/age/npm/pnpm/10.13.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/pnpm/10.12.4/10.13.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | packageManager | minor |
| [pnpm](https://pnpm.io) ([source](https://redirect.github.com/pnpm/pnpm/tree/HEAD/pnpm)) | [`10.12.4` -> `10.13.1`](https://renovatebot.com/diffs/npm/pnpm/10.12.4/10.13.1) | [![age](https://developer.mend.io/api/mc/badges/age/npm/pnpm/10.13.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/pnpm/10.12.4/10.13.1?slim=true)](https://docs.renovatebot.com/merge-confidence/) | engines | minor |
| [prettier](https://prettier.io) ([source](https://redirect.github.com/prettier/prettier)) | [`3.5.3` -> `3.6.2`](https://renovatebot.com/diffs/npm/prettier/3.5.3/3.6.2) | [![age](https://developer.mend.io/api/mc/badges/age/npm/prettier/3.6.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/prettier/3.5.3/3.6.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [prettier-plugin-tailwindcss](https://redirect.github.com/tailwindlabs/prettier-plugin-tailwindcss) | [`0.6.13` -> `0.6.14`](https://renovatebot.com/diffs/npm/prettier-plugin-tailwindcss/0.6.13/0.6.14) | [![age](https://developer.mend.io/api/mc/badges/age/npm/prettier-plugin-tailwindcss/0.6.14?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/prettier-plugin-tailwindcss/0.6.13/0.6.14?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | patch |
| [reka-ui](https://redirect.github.com/unovue/reka-ui) | [`2.3.1` -> `2.3.2`](https://renovatebot.com/diffs/npm/reka-ui/2.3.1/2.3.2) | [![age](https://developer.mend.io/api/mc/badges/age/npm/reka-ui/2.3.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/reka-ui/2.3.1/2.3.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | patch |
| [semver](https://redirect.github.com/npm/node-semver) | [`7.7.1` -> `7.7.2`](https://renovatebot.com/diffs/npm/semver/7.7.1/7.7.2) | [![age](https://developer.mend.io/api/mc/badges/age/npm/semver/7.7.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/semver/7.7.1/7.7.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | patch |
| [systeminformation](https://systeminformation.io) ([source](https://redirect.github.com/sebhildebrandt/systeminformation)) | [`5.27.6` -> `5.27.7`](https://renovatebot.com/diffs/npm/systeminformation/5.27.6/5.27.7) | [![age](https://developer.mend.io/api/mc/badges/age/npm/systeminformation/5.27.7?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/systeminformation/5.27.6/5.27.7?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | patch |
| [tsx](https://tsx.is) ([source](https://redirect.github.com/privatenumber/tsx)) | [`4.19.3` -> `4.20.3`](https://renovatebot.com/diffs/npm/tsx/4.19.3/4.20.3) | [![age](https://developer.mend.io/api/mc/badges/age/npm/tsx/4.20.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/tsx/4.19.3/4.20.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | minor |
| [typescript-eslint](https://typescript-eslint.io/packages/typescript-eslint) ([source](https://redirect.github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/typescript-eslint)) | [`8.34.1` -> `8.36.0`](https://renovatebot.com/diffs/npm/typescript-eslint/8.34.1/8.36.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/typescript-eslint/8.36.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/typescript-eslint/8.34.1/8.36.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [vite](https://vite.dev) ([source](https://redirect.github.com/vitejs/vite/tree/HEAD/packages/vite)) | [`7.0.3` -> `7.0.4`](https://renovatebot.com/diffs/npm/vite/7.0.3/7.0.4) | [![age](https://developer.mend.io/api/mc/badges/age/npm/vite/7.0.4?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/vite/7.0.3/7.0.4?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | patch |
| [vitest](https://redirect.github.com/vitest-dev/vitest) ([source](https://redirect.github.com/vitest-dev/vitest/tree/HEAD/packages/vitest)) | [`3.0.7` -> `3.2.4`](https://renovatebot.com/diffs/npm/vitest/3.0.7/3.2.4) | [![age](https://developer.mend.io/api/mc/badges/age/npm/vitest/3.2.4?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/vitest/3.0.7/3.2.4?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |
| [vue-i18n](https://redirect.github.com/intlify/vue-i18n/tree/master/packages/vue-i18n#readme) ([source](https://redirect.github.com/intlify/vue-i18n/tree/HEAD/packages/vue-i18n)) | [`11.1.6` -> `11.1.9`](https://renovatebot.com/diffs/npm/vue-i18n/11.1.6/11.1.9) | [![age](https://developer.mend.io/api/mc/badges/age/npm/vue-i18n/11.1.9?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/vue-i18n/11.1.6/11.1.9?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | patch |
| [vue-sonner](https://redirect.github.com/xiaoluoboding/vue-sonner) | [`1.3.0` -> `1.3.2`](https://renovatebot.com/diffs/npm/vue-sonner/1.3.0/1.3.2) | [![age](https://developer.mend.io/api/mc/badges/age/npm/vue-sonner/1.3.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/vue-sonner/1.3.0/1.3.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | patch |
| [vuetify](https://vuetifyjs.com) ([source](https://redirect.github.com/vuetifyjs/vuetify/tree/HEAD/packages/vuetify)) | [`3.8.10` -> `3.9.0`](https://renovatebot.com/diffs/npm/vuetify/3.8.10/3.9.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/vuetify/3.9.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/vuetify/3.8.10/3.9.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | minor |
| [wrangler](https://redirect.github.com/cloudflare/workers-sdk) ([source](https://redirect.github.com/cloudflare/workers-sdk/tree/HEAD/packages/wrangler)) | [`^3.87.0` -> `^3.114.10`](https://renovatebot.com/diffs/npm/wrangler/3.114.10/3.114.11) | [![age](https://developer.mend.io/api/mc/badges/age/npm/wrangler/3.114.11?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/wrangler/3.114.10/3.114.11?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | patch |
| [ws](https://redirect.github.com/websockets/ws) | [`8.18.2` -> `8.18.3`](https://renovatebot.com/diffs/npm/ws/8.18.2/8.18.3) | [![age](https://developer.mend.io/api/mc/badges/age/npm/ws/8.18.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/ws/8.18.2/8.18.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | patch |
| [ws](https://redirect.github.com/websockets/ws) | [`8.18.2` -> `8.18.3`](https://renovatebot.com/diffs/npm/ws/8.18.2/8.18.3) | [![age](https://developer.mend.io/api/mc/badges/age/npm/ws/8.18.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/ws/8.18.2/8.18.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | patch |
| [zod](https://zod.dev) ([source](https://redirect.github.com/colinhacks/zod)) | [`3.24.2` -> `3.25.76`](https://renovatebot.com/diffs/npm/zod/3.24.2/3.25.76) | [![age](https://developer.mend.io/api/mc/badges/age/npm/zod/3.25.76?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/zod/3.24.2/3.25.76?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | minor |
| [zod](https://zod.dev) ([source](https://redirect.github.com/colinhacks/zod)) | [`3.25.67` -> `3.25.76`](https://renovatebot.com/diffs/npm/zod/3.25.67/3.25.76) | [![age](https://developer.mend.io/api/mc/badges/age/npm/zod/3.25.76?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/zod/3.25.67/3.25.76?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | patch |
| [zx](https://google.github.io/zx/) ([source](https://redirect.github.com/google/zx)) | [`8.3.2` -> `8.6.2`](https://renovatebot.com/diffs/npm/zx/8.3.2/8.6.2) | [![age](https://developer.mend.io/api/mc/badges/age/npm/zx/8.6.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/zx/8.3.2/8.6.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | dependencies | minor |
| [zx](https://google.github.io/zx/) ([source](https://redirect.github.com/google/zx)) | [`8.5.5` -> `8.6.2`](https://renovatebot.com/diffs/npm/zx/8.5.5/8.6.2) | [![age](https://developer.mend.io/api/mc/badges/age/npm/zx/8.6.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/zx/8.5.5/8.6.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | devDependencies | minor |

---

### Release Notes

<details>
<summary>eslint/eslint (@&#8203;eslint/js)</summary>

### [`v9.30.1`](https://redirect.github.com/eslint/eslint/compare/v9.30.0...b3dbc16563cb7036d75edff9814e17053a645321)

[Compare Source](https://redirect.github.com/eslint/eslint/compare/v9.30.0...v9.30.1)

### [`v9.30.0`](https://redirect.github.com/eslint/eslint/compare/v9.29.0...5a5d5261037fdf84a91f2f22d3726d58572453f4)

[Compare Source](https://redirect.github.com/eslint/eslint/compare/v9.29.0...v9.30.0)

</details>

<details>
<summary>faker-js/faker (@&#8203;faker-js/faker)</summary>

### [`v9.9.0`](https://redirect.github.com/faker-js/faker/blob/HEAD/CHANGELOG.md#990-2025-07-01)

[Compare Source](https://redirect.github.com/faker-js/faker/compare/v9.8.0...v9.9.0)

##### New Locales

- **locale:** add word data to pt\_br and pt\_pt locales ([#&#8203;3531](https://redirect.github.com/faker-js/faker/issues/3531)) ([a405ac8](a405ac8740))

##### Features

- **location:** simple coordinate methods ([#&#8203;3528](https://redirect.github.com/faker-js/faker/issues/3528)) ([d07d96d](d07d96d018))

</details>

<details>
<summary>floating-ui/floating-ui (@&#8203;floating-ui/dom)</summary>

### [`v1.7.2`](https://redirect.github.com/floating-ui/floating-ui/blob/HEAD/packages/dom/CHANGELOG.md#172)

[Compare Source](https://redirect.github.com/floating-ui/floating-ui/compare/@floating-ui/dom@1.7.1...@floating-ui/dom@1.7.2)

##### Patch Changes

- perf: reduce memory allocations
- Update dependencies: `@floating-ui/utils@0.2.10`, `@floating-ui/core@1.7.2`

</details>

<details>
<summary>floating-ui/floating-ui (@&#8203;floating-ui/utils)</summary>

### [`v0.2.10`](https://redirect.github.com/floating-ui/floating-ui/blob/HEAD/packages/utils/CHANGELOG.md#0210)

[Compare Source](https://redirect.github.com/floating-ui/floating-ui/compare/@floating-ui/utils@0.2.9...@floating-ui/utils@0.2.10)

##### Patch Changes

- refactor: small performance improvements
- perf: reduce memory allocations

</details>

<details>
<summary>floating-ui/floating-ui (@&#8203;floating-ui/vue)</summary>

### [`v1.1.7`](https://redirect.github.com/floating-ui/floating-ui/blob/HEAD/packages/vue/CHANGELOG.md#117)

[Compare Source](https://redirect.github.com/floating-ui/floating-ui/compare/@floating-ui/vue@1.1.6...@floating-ui/vue@1.1.7)

##### Patch Changes

- Update dependencies: `@floating-ui/utils@0.2.10`, `@floating-ui/dom@1.7.2`

</details>

<details>
<summary>dotansimha/graphql-code-generator (@&#8203;graphql-codegen/client-preset)</summary>

### [`v4.8.3`](https://redirect.github.com/dotansimha/graphql-code-generator/blob/HEAD/packages/presets/client/CHANGELOG.md#483)

[Compare Source](https://redirect.github.com/dotansimha/graphql-code-generator/compare/@graphql-codegen/client-preset@4.8.2...@graphql-codegen/client-preset@4.8.3)

##### Patch Changes

- [#&#8203;10362](https://redirect.github.com/dotansimha/graphql-code-generator/pull/10362) [`3188b8c`](3188b8c39e) Thanks [@&#8203;Brookke](https://redirect.github.com/Brookke)! - Make generated type compatible with noImplicitOverride=true

- [#&#8203;10373](https://redirect.github.com/dotansimha/graphql-code-generator/pull/10373) [`c3295f9`](c3295f9c60) Thanks [@&#8203;eddeee888](https://redirect.github.com/eddeee888)! - Fix client preset not working with exactOptionalPropertyTypes=true when documentMode=string

- Updated dependencies \[[`3188b8c`](3188b8c39e), [`c3295f9`](c3295f9c60)]:
  - [@&#8203;graphql-codegen/typed-document-node](https://redirect.github.com/graphql-codegen/typed-document-node)@&#8203;5.1.2

</details>
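
The `noImplicitOverride` patch above targets a TypeScript compiler flag; a small sketch of what that flag enforces (the class names are hypothetical, not the generated code itself):

```ts
// With "noImplicitOverride": true in tsconfig.json, a member that shadows a
// base-class member must carry the `override` modifier; generated classes
// that omit it fail to compile, which is what the patch above addresses.
class BaseDocument {
  toString(): string {
    return 'base';
  }
}

class TypedDocument extends BaseDocument {
  override toString(): string {
    // Dropping `override` here would be a compile error under the flag.
    return 'typed';
  }
}
```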

<details>
<summary>dotansimha/graphql-code-generator (@&#8203;graphql-codegen/typed-document-node)</summary>

### [`v5.1.2`](https://redirect.github.com/dotansimha/graphql-code-generator/blob/HEAD/packages/plugins/typescript/typed-document-node/CHANGELOG.md#512)

[Compare Source](https://redirect.github.com/dotansimha/graphql-code-generator/compare/@graphql-codegen/typed-document-node@5.1.1...@graphql-codegen/typed-document-node@5.1.2)

##### Patch Changes

- [#&#8203;10362](https://redirect.github.com/dotansimha/graphql-code-generator/pull/10362) [`3188b8c`](3188b8c39e) Thanks [@&#8203;Brookke](https://redirect.github.com/Brookke)! - Make generated type compatible with noImplicitOverride=true

- [#&#8203;10373](https://redirect.github.com/dotansimha/graphql-code-generator/pull/10373) [`c3295f9`](c3295f9c60) Thanks [@&#8203;eddeee888](https://redirect.github.com/eddeee888)! - Fix client preset not working with exactOptionalPropertyTypes=true when documentMode=string

</details>

<details>
<summary>ianvs/prettier-plugin-sort-imports (@&#8203;ianvs/prettier-plugin-sort-imports)</summary>

### [`v4.5.1`](https://redirect.github.com/ianvs/prettier-plugin-sort-imports/compare/v4.5.0...040fa5e3a7dd01a90d80bb12072344745e426da6)

[Compare Source](https://redirect.github.com/ianvs/prettier-plugin-sort-imports/compare/v4.5.0...v4.5.1)

### [`v4.5.0`](https://redirect.github.com/ianvs/prettier-plugin-sort-imports/compare/v4.4.2...3497e9a87974954e42198d04d69d9a2a24dbebbd)

[Compare Source](https://redirect.github.com/ianvs/prettier-plugin-sort-imports/compare/v4.4.2...v4.5.0)

</details>

<details>
<summary>nuxt/devtools (@&#8203;nuxt/devtools)</summary>

### [`v2.6.2`](https://redirect.github.com/nuxt/devtools/blob/HEAD/CHANGELOG.md#262-2025-07-02)

[Compare Source](https://redirect.github.com/nuxt/devtools/compare/v2.6.1...v2.6.2)

##### Bug Fixes

- panel dragging issue, close [#&#8203;874](https://redirect.github.com/nuxt/devtools/issues/874), close [#&#8203;871](https://redirect.github.com/nuxt/devtools/issues/871), close [#&#8203;873](https://redirect.github.com/nuxt/devtools/issues/873) ([619de37](619de37ace))

### [`v2.6.1`](https://redirect.github.com/nuxt/devtools/blob/HEAD/CHANGELOG.md#261-2025-07-01)

[Compare Source](https://redirect.github.com/nuxt/devtools/compare/v2.6.0...v2.6.1)

##### Bug Fixes

- **deps:** do not depend on `@nuxt/schema` ([#&#8203;872](https://redirect.github.com/nuxt/devtools/issues/872)) ([62443ec](62443ecb12))

### [`v2.6.0`](https://redirect.github.com/nuxt/devtools/blob/HEAD/CHANGELOG.md#260-2025-06-29)

[Compare Source](https://redirect.github.com/nuxt/devtools/compare/v2.5.0...v2.6.0)

##### Bug Fixes

- timing labels wrapping ([#&#8203;866](https://redirect.github.com/nuxt/devtools/issues/866)) ([fd01e60](fd01e6022a))

##### Features

- update deps ([eef2c09](eef2c09ea1))

</details>

<details>
<summary>nuxt/eslint (@&#8203;nuxt/eslint)</summary>

### [`v1.5.2`](https://redirect.github.com/nuxt/eslint/releases/tag/v1.5.2)

[Compare Source](https://redirect.github.com/nuxt/eslint/compare/v1.5.1...v1.5.2)

##### 🚀 Features

- Add option `features.import.plugin` to swap plugin implementation, close [#&#8203;587](https://redirect.github.com/nuxt/eslint/issues/587) - by [@&#8203;antfu](https://redirect.github.com/antfu) in [https://github.com/nuxt/eslint/issues/587](https://redirect.github.com/nuxt/eslint/issues/587) [<samp>(66f5e)</samp>](https://redirect.github.com/nuxt/eslint/commit/66f5ee0)

##### [View changes on GitHub](https://redirect.github.com/nuxt/eslint/compare/v1.5.1...v1.5.2)

### [`v1.5.1`](https://redirect.github.com/nuxt/eslint/releases/tag/v1.5.1)

[Compare Source](https://redirect.github.com/nuxt/eslint/compare/v1.5.0...v1.5.1)

##### 🐞 Bug Fixes

- **eslint-config**: Replace deprecated vue/object-property-newline option - by [@&#8203;amery](https://redirect.github.com/amery) in [https://github.com/nuxt/eslint/issues/586](https://redirect.github.com/nuxt/eslint/issues/586) [<samp>(7805e)</samp>](https://redirect.github.com/nuxt/eslint/commit/7805e0d)

##### [View changes on GitHub](https://redirect.github.com/nuxt/eslint/compare/v1.5.0...v1.5.1)

### [`v1.5.0`](https://redirect.github.com/nuxt/eslint/releases/tag/v1.5.0)

[Compare Source](https://redirect.github.com/nuxt/eslint/compare/v1.4.1...v1.5.0)

##### 🚀 Features

- Switch to `eslint-plugin-import-lite`, update deps - by [@&#8203;antfu](https://redirect.github.com/antfu) [<samp>(31bd8)</samp>](https://redirect.github.com/nuxt/eslint/commit/31bd8a0)

##### 🐞 Bug Fixes

- **eslint-config**: Add file type restrictions to prevent CSS parsing errors - by [@&#8203;amery](https://redirect.github.com/amery) in [https://github.com/nuxt/eslint/issues/584](https://redirect.github.com/nuxt/eslint/issues/584) [<samp>(40521)</samp>](https://redirect.github.com/nuxt/eslint/commit/40521a1)

##### [View changes on GitHub](https://redirect.github.com/nuxt/eslint/compare/v1.4.1...v1.5.0)

</details>

<details>
<summary>nuxt/test-utils (@&#8203;nuxt/test-utils)</summary>

### [`v3.19.2`](https://redirect.github.com/nuxt/test-utils/releases/tag/v3.19.2)

[Compare Source](https://redirect.github.com/nuxt/test-utils/compare/v3.19.1...v3.19.2)
> 3.19.2 is the next patch release.
>
> **Timetable**: 1 July

#### 👉 Changelog

[compare
changes](https://redirect.github.com/nuxt/test-utils/compare/v3.19.1...v3.19.2)

##### 🩹 Fixes

- **config:** Add missing mocks for vue-devtools
([#&#8203;1321](https://redirect.github.com/nuxt/test-utils/pull/1321))
- **runtime-utils:** Prevent event duplication
([#&#8203;1328](https://redirect.github.com/nuxt/test-utils/pull/1328))
- **config:** Include tests without `.nuxt.` extension
([#&#8203;1311](https://redirect.github.com/nuxt/test-utils/pull/1311))
- **deps:** Drop `@nuxt/schema` dependency ([fa3a99b4](https://redirect.github.com/nuxt/test-utils/commit/fa3a99b4))
- **config:** Use 'projects' for `vitest` >= v3.2
([#&#8203;1344](https://redirect.github.com/nuxt/test-utils/pull/1344))
- **module:** Use user `vite` version to merge config
([#&#8203;1345](https://redirect.github.com/nuxt/test-utils/pull/1345))
- **runtime-utils:** Handle computed defined using an object
([#&#8203;1342](https://redirect.github.com/nuxt/test-utils/pull/1342))

##### 🏡 Chore

- Prefer `nuxt` over `nuxi`
([#&#8203;1310](https://redirect.github.com/nuxt/test-utils/pull/1310))
- Pin node types
([93921643](https://redirect.github.com/nuxt/test-utils/commit/93921643))
- Do not include dev-deps in `engines.node` calculation
([2f74359b](https://redirect.github.com/nuxt/test-utils/commit/2f74359b))
- Add type assertions for indexed access
([51b4a4e3](https://redirect.github.com/nuxt/test-utils/commit/51b4a4e3))
- Update installed-check flag
([2b97d885](https://redirect.github.com/nuxt/test-utils/commit/2b97d885))

#####  Tests

- Update stub name for nuxt v4
([e7b07843](https://redirect.github.com/nuxt/test-utils/commit/e7b07843))
- Satisfy typescript
([fb0dea24](https://redirect.github.com/nuxt/test-utils/commit/fb0dea24))
- Update cucumber test for nuxt v4 welcome screen template
([8ec7782f](https://redirect.github.com/nuxt/test-utils/commit/8ec7782f))
- Simplify test
([90278bac](https://redirect.github.com/nuxt/test-utils/commit/90278bac))
- Update workspace example
([02f9b0a0](https://redirect.github.com/nuxt/test-utils/commit/02f9b0a0))
- Make browser tests forward-compat with v4
([574ea5f9](https://redirect.github.com/nuxt/test-utils/commit/574ea5f9))

##### 🤖 CI

- Remove forced corepack installation
([bf19bd3a](https://redirect.github.com/nuxt/test-utils/commit/bf19bd3a))
- Run `knip`
([819aeacc](https://redirect.github.com/nuxt/test-utils/commit/819aeacc))
- Prepare environment before knipping
([ec7d8ddd](https://redirect.github.com/nuxt/test-utils/commit/ec7d8ddd))

##### ❤️ Contributors

- Daniel Roe
([@&#8203;danielroe](https://redirect.github.com/danielroe))
- Tomina ([@&#8203;Thomaash](https://redirect.github.com/Thomaash))
- lutejka ([@&#8203;lutejka](https://redirect.github.com/lutejka))
- J-Michalek
([@&#8203;J-Michalek](https://redirect.github.com/J-Michalek))

</details>

<details>
<summary>rollup/rollup (@&#8203;rollup/rollup-linux-x64-gnu)</summary>

### [`v4.44.2`](https://redirect.github.com/rollup/rollup/blob/HEAD/CHANGELOG.md#4442)

[Compare Source](https://redirect.github.com/rollup/rollup/compare/v4.44.1...v4.44.2)

*2025-07-04*

##### Bug Fixes

- Correctly handle `@__PURE__` annotations after `new` keyword
([#&#8203;5998](https://redirect.github.com/rollup/rollup/issues/5998))
- Generate correct source mapping for closing braces of block statements
([#&#8203;5999](https://redirect.github.com/rollup/rollup/issues/5999))

##### Pull Requests

- [#&#8203;5998](https://redirect.github.com/rollup/rollup/pull/5998):
Support `@__PURE__` when nested after new in constructor invocations
([@&#8203;TrickyPi](https://redirect.github.com/TrickyPi))
- [#&#8203;5999](https://redirect.github.com/rollup/rollup/pull/5999):
Add location info for closing brace of block statement
([@&#8203;TrickyPi](https://redirect.github.com/TrickyPi))
- [#&#8203;6002](https://redirect.github.com/rollup/rollup/pull/6002):
chore(deps): update dependency vite to v7
([@&#8203;renovate](https://redirect.github.com/renovate)\[bot],
[@&#8203;lukastaegert](https://redirect.github.com/lukastaegert))
- [#&#8203;6004](https://redirect.github.com/rollup/rollup/pull/6004):
fix(deps): lock file maintenance minor/patch updates
([@&#8203;renovate](https://redirect.github.com/renovate)\[bot],
[@&#8203;lukastaegert](https://redirect.github.com/lukastaegert))

### [`v4.44.1`](https://redirect.github.com/rollup/rollup/blob/HEAD/CHANGELOG.md#4441)

[Compare Source](https://redirect.github.com/rollup/rollup/compare/v4.44.0...v4.44.1)

*2025-06-26*

##### Bug Fixes

- Reinstate maxParallelFileOps limit of 1000 to resolve the issue for
some
([#&#8203;5992](https://redirect.github.com/rollup/rollup/issues/5992))

##### Pull Requests

- [#&#8203;5988](https://redirect.github.com/rollup/rollup/pull/5988):
fix(deps): lock file maintenance minor/patch updates
([@&#8203;renovate](https://redirect.github.com/renovate)\[bot],
[@&#8203;lukastaegert](https://redirect.github.com/lukastaegert))
- [#&#8203;5992](https://redirect.github.com/rollup/rollup/pull/5992):
Set maxParallelFileOps to 1000
([@&#8203;lukastaegert](https://redirect.github.com/lukastaegert))

</details>
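
The first 4.44.2 fix above concerns `@__PURE__` annotations, which mark a call or, per this fix, a `new` expression as side-effect-free so the bundler can drop it when the result is unused; a small sketch (names hypothetical):

```ts
class Logger {
  constructor(private readonly scope: string) {}
}

// The annotation tells the bundler this `new` expression has no side effects,
// so the whole statement can be tree-shaken if `logger` is never referenced.
const logger = /* @__PURE__ */ new Logger('app');
```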

<details>
<summary>swc-project/swc (@&#8203;swc/core)</summary>

### [`v1.12.11`](https://redirect.github.com/swc-project/swc/blob/HEAD/CHANGELOG.md#11211---2025-07-08)

[Compare Source](https://redirect.github.com/swc-project/swc/compare/v1.12.9...v1.12.11)

##### Bug Fixes

- **(ci)** Fix CI
([#&#8203;10790](https://redirect.github.com/swc-project/swc/issues/10790))
([b3f9760](b3f97604b8))

- **(es)** Use `default-features = false` for `swc` crate usages
([#&#8203;10776](https://redirect.github.com/swc-project/swc/issues/10776))
([50b2eac](50b2eacdf7))

- **(es)** Make `swc_typescript` optional
([#&#8203;10792](https://redirect.github.com/swc-project/swc/issues/10792))
([c32569d](c32569dd55))

- **(preset-env)** Fix `default` value for `caniuse`
([#&#8203;10754](https://redirect.github.com/swc-project/swc/issues/10754))
([aa4cd5b](aa4cd5ba7c))

- **(preset-env)** Revert `default` value
([#&#8203;10778](https://redirect.github.com/swc-project/swc/issues/10778))
([7af5824](7af58242c2))

##### Features

- **(es/minifier)** Inline lazily initialized literals
([#&#8203;10752](https://redirect.github.com/swc-project/swc/issues/10752))
([fd5d2e2](fd5d2e2f33))

- **(es/minifier)** Evaluate `Number.XXX` constants
([#&#8203;10756](https://redirect.github.com/swc-project/swc/issues/10756))
([c47dab5](c47dab5f90))

- **(es/minifier)** Implement partial evaluation of array join
([#&#8203;10758](https://redirect.github.com/swc-project/swc/issues/10758))
([bdf3a98](bdf3a98bb4))

- **(swc\_core)** Expose `swc_ecma_parser/unstable`
([#&#8203;10744](https://redirect.github.com/swc-project/swc/issues/10744))
([db0679e](db0679e5ca))
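
The `Number.XXX` (#10756) and array-join (#10758) entries above describe new constant folding. A speculative TypeScript sketch of inputs the minifier should now evaluate at build time (based only on the entry titles, not SWC's test suite):

```ts
// Foldable to the literal 9007199254740991 once Number.* constants are evaluated.
const max = Number.MAX_SAFE_INTEGER;

// Partial evaluation of Array#join: foldable to the string 'a-b-c'.
const joined = ['a', 'b', 'c'].join('-');

export { max, joined };
```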

##### Miscellaneous Tasks

- **(common)** Remove `clone()` in proc macro
([#&#8203;10762](https://redirect.github.com/swc-project/swc/issues/10762))
([12e3180](12e318036c))

- **(deps)** Update `browserslist-rs` to `0.19`
([#&#8203;10750](https://redirect.github.com/swc-project/swc/issues/10750))
([f8bf21c](f8bf21c072))

- **(deps)** Remove unused deps with cargo-shear
([#&#8203;10765](https://redirect.github.com/swc-project/swc/issues/10765))
([f4e4974](f4e4974ffe))

- **(es/module)** Drop `node` feature of `swc_ecma_loader`
([#&#8203;10761](https://redirect.github.com/swc-project/swc/issues/10761))
([44471b5](44471b5151))

- **(plugin/runner)** Remove unused feature and dependency
([#&#8203;10764](https://redirect.github.com/swc-project/swc/issues/10764))
([a7d8a0a](a7d8a0ac89))

##### Performance

- **(es/lexer)** Use `bitflags` for `Syntax`
([#&#8203;10676](https://redirect.github.com/swc-project/swc/issues/10676))
([bf8c722](bf8c722e25))

- **(es/lexer)** Do not scan number if there's no underscore
([#&#8203;10788](https://redirect.github.com/swc-project/swc/issues/10788))
([f5d92ee](f5d92ee1bf))

- **(es/lints)** Make rules not parallel
([#&#8203;10772](https://redirect.github.com/swc-project/swc/issues/10772))
([4e6001c](4e6001c5a4))

- **(es/lints)** Merge critical rules
([#&#8203;10773](https://redirect.github.com/swc-project/swc/issues/10773))
([816e75a](816e75a209))

- **(es/parser)** Reduce the number of context set ops
([#&#8203;10742](https://redirect.github.com/swc-project/swc/issues/10742))
([08b4e8b](08b4e8b285))

- **(es/parser)** Reduce value set operations for context
([#&#8203;10751](https://redirect.github.com/swc-project/swc/issues/10751))
([4976b12](4976b12f93))

- **(es/parser)** Reduce query ops of current token
([#&#8203;10766](https://redirect.github.com/swc-project/swc/issues/10766))
([4304f91](4304f9129c))

- **(es/parser)** Remove useless call in `parse_ident`
([#&#8203;10770](https://redirect.github.com/swc-project/swc/issues/10770))
([4ca12c9](4ca12c9725))

- **(es/renamer)** Reduce time complexity in case of conflict
([#&#8203;10749](https://redirect.github.com/swc-project/swc/issues/10749))
([0279914](02799141bf))

- **(hstr)** Do not compare string during creating atoms
([#&#8203;10791](https://redirect.github.com/swc-project/swc/issues/10791))
([43a4f11](43a4f117cb))

- Replace `rayon` with `par-iter`
([#&#8203;10774](https://redirect.github.com/swc-project/swc/issues/10774))
([a6e6ebe](a6e6ebeaca))

##### Refactor

- **(es)** Make `swc_ecma_lint` optional for `swc` crate
([#&#8203;10767](https://redirect.github.com/swc-project/swc/issues/10767))
([f80415b](f80415baa6))

- **(es/lexer)** Use const fn in `SyntaxFlags`
([#&#8203;10737](https://redirect.github.com/swc-project/swc/issues/10737))
([b9eb23a](b9eb23aec3))

- **(es/parser)** Cleanup `parse_setter_param`
([#&#8203;10745](https://redirect.github.com/swc-project/swc/issues/10745))
([70734f4](70734f40d4))

- **(es/parser)** Cleanup `typed-arena`
([#&#8203;10769](https://redirect.github.com/swc-project/swc/issues/10769))
([ce5138d](ce5138d3aa))

- **(es/parser)** Cleanup for ctx
([#&#8203;10777](https://redirect.github.com/swc-project/swc/issues/10777))
([d60a611](d60a611dc7))

- **(es/parser)** Delete `with_ctx`
([#&#8203;10779](https://redirect.github.com/swc-project/swc/issues/10779))
([ce057c5](ce057c55ef))

- **(es/parser)** Cleanup
([#&#8203;10781](https://redirect.github.com/swc-project/swc/issues/10781))
([176ce36](176ce36d24))

- **(es/preset)** Remove deprecated `preset_env` function and `feature`
module
([#&#8203;10759](https://redirect.github.com/swc-project/swc/issues/10759))
([fa0e0ab](fa0e0abf41))

- **(es/preset-env)** Use phf for corejs3 entry
([#&#8203;10712](https://redirect.github.com/swc-project/swc/issues/10712))
([658b26d](658b26d838))

##### Testing

- **(es/minifier)** Update the terser test list
([#&#8203;10748](https://redirect.github.com/swc-project/swc/issues/10748))
([1eace01](1eace01303))

- **(es/minifier)** Update the passing test list
([#&#8203;10782](https://redirect.github.com/swc-project/swc/issues/10782))
([8aa888b](8aa888bc2a))

- **(es/parser)** Add a test for duplicate labels
([#&#8203;10784](https://redirect.github.com/swc-project/swc/issues/10784))
([28fc643](28fc64310c))

##### Performance

- **(hstr)** Do not compare static tag
([#&#8203;10771](https://redirect.github.com/swc-project/swc/issues/10771))
([5d3ce83](5d3ce83add))

### [`v1.12.9`](https://redirect.github.com/swc-project/swc/blob/HEAD/CHANGELOG.md#1129---2025-07-01)

[Compare
Source](https://redirect.github.com/swc-project/swc/compare/v1.12.7...v1.12.9)

##### Bug Fixes

- **(es/lexer)** Parse uppercase hex numbers correctly
([#&#8203;10728](https://redirect.github.com/swc-project/swc/issues/10728))
([ead6256](ead62560b0))

- **(es/lexer)** Allow keywords as jsx attribute names
([#&#8203;10730](https://redirect.github.com/swc-project/swc/issues/10730))
([04ef20a](https://redirect.github.com/swc-project/swc/commit/04ef20ad9b))
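
Both lexer fixes accept inputs that are valid JavaScript but were previously mis-lexed. A small illustration of the hex case (#10728):

```ts
// Numeric literals may use either prefix case; the uppercase form previously
// tripped the lexer.
const lower = 0xff; // 255
const upper = 0XFF; // 255 — now parsed correctly per #10728

export { lower, upper };
```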

</details>

---

### Configuration

📅 **Schedule**: Branch creation - At any time (no schedule defined),
Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

👻 **Immortal**: This PR will be recreated if closed unmerged. Get
[config
help](https://redirect.github.com/renovatebot/renovate/discussions) if
that's undesired.

---


This PR was generated by [Mend Renovate](https://mend.io/renovate/).
View the [repository job
log](https://developer.mend.io/github/unraid/api).


Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-11 14:40:42 -04:00
renovate[bot]
2ed1308e40 chore(deps): update dependency vite-plugin-vue-tracer to v1 (#1472)
This PR contains the following updates:

| Package | Change | Age | Confidence |
|---|---|---|---|
|
[vite-plugin-vue-tracer](https://redirect.github.com/antfu/vite-plugin-vue-tracer)
| [`0.1.4` ->
`1.0.0`](https://renovatebot.com/diffs/npm/vite-plugin-vue-tracer/0.1.4/1.0.0)
|
[![age](https://developer.mend.io/api/mc/badges/age/npm/vite-plugin-vue-tracer/1.0.0?slim=true)](https://docs.renovatebot.com/merge-confidence/)
|
[![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/vite-plugin-vue-tracer/0.1.4/1.0.0?slim=true)](https://docs.renovatebot.com/merge-confidence/)
|

---

### Release Notes

<details>
<summary>antfu/vite-plugin-vue-tracer (vite-plugin-vue-tracer)</summary>

### [`v1.0.0`](https://redirect.github.com/antfu/vite-plugin-vue-tracer/releases/tag/v1.0.0)

[Compare
Source](https://redirect.github.com/antfu/vite-plugin-vue-tracer/compare/v0.1.5...v1.0.0)

*No significant changes*

##### [View changes on GitHub](https://redirect.github.com/antfu/vite-plugin-vue-tracer/compare/v0.1.5...v1.0.0)

### [`v0.1.5`](https://redirect.github.com/antfu/vite-plugin-vue-tracer/releases/tag/v0.1.5)

[Compare
Source](https://redirect.github.com/antfu/vite-plugin-vue-tracer/compare/v0.1.4...v0.1.5)

#####    🚀 Features

- Support Vite 7  -  by
[@&#8203;antfu](https://redirect.github.com/antfu)
[<samp>(6927e)</samp>](https://redirect.github.com/antfu/vite-plugin-vue-tracer/commit/6927e8a)

##### [View changes on GitHub](https://redirect.github.com/antfu/vite-plugin-vue-tracer/compare/v0.1.4...v0.1.5)

</details>

---

### Configuration

📅 **Schedule**: Branch creation - At any time (no schedule defined),
Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---


This PR was generated by [Mend Renovate](https://mend.io/renovate/).
View the [repository job
log](https://developer.mend.io/github/unraid/api).


Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-11 14:17:33 -04:00
Zack Spear
6c03df2b97 tests: server store trial extensions (#1504)
Requested in feature PR
https://github.com/unraid/api/pull/1490#issuecomment-3059002854


## Summary by CodeRabbit

* **New Features**
* Enhanced trial expiration messaging to clearly communicate when the
trial is expiring, options for extension, and the consequences of
expiration.
* Added dynamic display of trial extension options and actions based on
eligibility and time remaining before expiration.

* **Bug Fixes**
* Improved accuracy of messages and actions related to trial extension
eligibility and renewal windows.

* **Tests**
* Added comprehensive tests for trial extension eligibility, renewal
windows, and related user messages and actions.

* **Documentation**
* Updated English locale strings to reflect new trial expiration and
extension messages.

2025-07-11 14:17:09 -04:00
Pujit Mehrotra
074370c42c fix: over-eager cloud query from web components (#1506)
## Summary by CodeRabbit

* **Bug Fixes**
* Improved initialization logic to ensure cloud state is only loaded
when the connect plugin is installed, enhancing reliability during
startup.
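
A hypothetical sketch of that guard (the function and call shape are invented for illustration; the plugin id `unraid-api-plugin-connect` does appear in this repo's config):

```ts
// Skip the cloud query entirely unless the Connect plugin is installed.
async function loadCloudStateIfAvailable(installedPlugins: string[]): Promise<void> {
  if (!installedPlugins.includes('unraid-api-plugin-connect')) {
    return; // avoids eager `cloud` queries (and their error logs) when Connect is absent
  }
  // ...query cloud state here...
}
```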
2025-07-11 14:16:53 -04:00
Zack Spear
f34a33bc9f feat: trial extension allowed within 5 days of expiration (#1490) 2025-07-10 17:21:24 -07:00
github-actions[bot]
c7801a9236 chore(main): release 4.9.5 (#1503)
🤖 I have created a release *beep* *boop*
---


## [4.9.5](https://github.com/unraid/api/compare/v4.9.4...v4.9.5)
(2025-07-10)


### Bug Fixes

* **connect:** rm eager restart on `ERROR_RETYING` connection status
([#1502](https://github.com/unraid/api/issues/1502))
([dd759d9](dd759d9f0f))

---
This PR was generated with [Release
Please](https://github.com/googleapis/release-please). See
[documentation](https://github.com/googleapis/release-please#release-please).

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-07-10 10:27:45 -04:00
Pujit Mehrotra
dd759d9f0f fix(connect): rm eager restart on ERROR_RETYING connection status (#1502)

## Summary by CodeRabbit

* **Bug Fixes**
* Improved connection handling to prevent unnecessary reconnection
attempts during error retry states, ensuring reconnections only occur on
specific failures.

* **Tests**
* Added comprehensive tests to verify connection recovery,
identity-based connection, logout behavior, DDoS prevention, and edge
case handling for connection state changes.
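
A hypothetical sketch of the guard described above (status names other than `ERROR_RETYING` are invented; `ERROR_RETYING` is spelled as it appears in the codebase):

```ts
type ConnectionStatus = 'CONNECTED' | 'DISCONNECTED' | 'ERROR_RETYING' | 'FAILED';

// ERROR_RETYING means the client is already retrying internally; eagerly
// restarting it from outside caused duplicate reconnection attempts.
// Restart only on hard failures.
function shouldRestartClient(status: ConnectionStatus): boolean {
  return status === 'FAILED';
}
```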

2025-07-10 10:21:54 -04:00
github-actions[bot]
74da8d81ef chore(main): release 4.9.4 (#1498)
🤖 I have created a release *beep* *boop*
---


## [4.9.4](https://github.com/unraid/api/compare/v4.9.3...v4.9.4)
(2025-07-09)


### Bug Fixes

* backport `<unraid-modals>` upon plg install when necessary
([#1499](https://github.com/unraid/api/issues/1499))
([33e0b1a](33e0b1ab24))
* DefaultPageLayout patch rollback omits legacy header logo
([#1497](https://github.com/unraid/api/issues/1497))
([ea20d1e](ea20d1e211))
* event emitter setup for writing status
([#1496](https://github.com/unraid/api/issues/1496))
([ca4e2db](ca4e2db1f2))

---
This PR was generated with [Release
Please](https://github.com/googleapis/release-please). See
[documentation](https://github.com/googleapis/release-please#release-please).

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-07-09 13:40:56 -04:00
Pujit Mehrotra
33e0b1ab24 fix: backport <unraid-modals> upon plg install when necessary (#1499)

## Summary by CodeRabbit

* **Bug Fixes**
* Prevented duplicate insertion of the modal component in the page
layout.

* **Chores**
* Improved installation script to ensure the modal component is added
only if missing.

2025-07-09 13:32:55 -04:00
Eli Bosley
ca4e2db1f2 fix: event emitter setup for writing status (#1496)
## Summary by CodeRabbit

* **Chores**
* Updated permissions to allow additional Bash command patterns in the
configuration.
* Improved connection status updates by triggering them via event
listeners during application bootstrap.
* Adjusted module provider registrations to reflect service relocation
within the application structure.
* **Tests**
* Added comprehensive unit and integration tests for connection status
writing and cleanup behaviors.
2025-07-09 13:16:53 -04:00
Pujit Mehrotra
ea20d1e211 fix: DefaultPageLayout patch rollback omits legacy header logo (#1497)

## Summary by CodeRabbit

* **New Features**
* Enhanced the header by displaying the OS version and additional server
information.
* Introduced a new notification system using a modern UI component for
toasts.
* Automatically creates a root session for local requests when no valid
session exists.

* **Bug Fixes**
* Removed outdated pop-up notification logic and bell icon from the
navigation area.

* **Style**
* Updated header layout and improved formatting for a cleaner
appearance.

2025-07-09 13:12:18 -04:00
github-actions[bot]
79c57b8ed0 chore(main): release 4.9.3 (#1495)
🤖 I have created a release *beep* *boop*
---


## [4.9.3](https://github.com/unraid/api/compare/v4.9.2...v4.9.3)
(2025-07-09)


### Bug Fixes

* duplicated header logo after api stops
([#1493](https://github.com/unraid/api/issues/1493))
([4168f43](4168f43e3e))

---
This PR was generated with [Release
Please](https://github.com/googleapis/release-please). See
[documentation](https://github.com/googleapis/release-please#release-please).

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-07-09 11:06:47 -04:00
Pujit Mehrotra
4168f43e3e fix: duplicated header logo after api stops (#1493)
Move the legacy header logo omission from the API file modifier to the plg
install step, to avoid breaking rollback (i.e., upon `unraid-api stop`) due
to the web component upgrade.

Tested on 7.1.4 & 7.2.0-beta.0.16

Testing & Reproduction procedure:

1. Install connect plugin
2. Run `unraid-api stop` on server
3. Refresh the page. On Unraid versions before 7.2, plugin versions prior to
this fix display a duplicated logo that blocks the nav menu; with this fix,
they do not. Plugin uninstall behavior remains unchanged.


## Summary by CodeRabbit

* **New Features**
* The plugin installation process now updates the header logo by
removing the old logo from the interface during installation.

2025-07-09 10:59:10 -04:00
github-actions[bot]
20de3ec8d6 chore(main): release 4.9.2 (#1488)
🤖 I have created a release *beep* *boop*
---


## [4.9.2](https://github.com/unraid/api/compare/v4.9.1...v4.9.2)
(2025-07-09)


### Bug Fixes

* invalid configs no longer crash API
([#1491](https://github.com/unraid/api/issues/1491))
([6bf3f77](6bf3f77638))
* invalid state for unraid plugin
([#1492](https://github.com/unraid/api/issues/1492))
([39b8f45](39b8f453da))
* release note escaping
([5b6bcb6](5b6bcb6043))

---
This PR was generated with [Release
Please](https://github.com/googleapis/release-please). See
[documentation](https://github.com/googleapis/release-please#release-please).

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-07-09 09:23:25 -04:00
Eli Bosley
39b8f453da fix: invalid state for unraid plugin (#1492)
## Summary by CodeRabbit

* **New Features**
* Improved connection status handling by introducing a new service that
writes connection status to a JSON file for enhanced integration.
* Updated system components to read connection status and allowed
origins from the new JSON file, ensuring more reliable and up-to-date
information.

* **Chores**
* Expanded allowed Bash command permissions to include commands starting
with "mv:".
2025-07-09 09:21:43 -04:00
Eli Bosley
6bf3f77638 fix: invalid configs no longer crash API (#1491)
## Summary by CodeRabbit

* **Bug Fixes**
* Improved error handling when loading and parsing configuration files,
preventing crashes and ensuring fallback to default settings if issues
occur.
* Enhanced logging for configuration errors, including warnings for
empty files and detailed error messages for JSON parsing failures.
* Added error handling to plugin listing to avoid failures when
configuration loading encounters errors.

* **Chores**
* Updated permissions to allow linting only for the `./api` package
using a filtered command.

* **Tests**
* Added comprehensive tests for configuration loading, parsing,
persistence, and updating, covering various file states and error
scenarios.
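
A minimal sketch of the fallback behavior those notes describe (assuming a JSON config file; the function and names are illustrative):

```ts
import { readFile } from 'node:fs/promises';

// Load a JSON config, warn on empty files, and fall back to defaults on
// read/parse errors instead of crashing the API.
async function loadConfigOrDefaults<T extends object>(path: string, defaults: T): Promise<T> {
  try {
    const raw = await readFile(path, 'utf8');
    if (raw.trim() === '') {
      console.warn(`Config file ${path} is empty; using defaults.`);
      return defaults;
    }
    return { ...defaults, ...(JSON.parse(raw) as Partial<T>) };
  } catch (error) {
    console.error(`Failed to load config from ${path}; using defaults.`, error);
    return defaults;
  }
}
```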
2025-07-09 08:50:43 -04:00
Eli Bosley
a79d049865 chore: lint for renovate PR (#1481) 2025-07-08 17:01:49 -04:00
Eli Bosley
5b6bcb6043 fix: release note escaping 2025-07-08 16:41:05 -04:00
github-actions[bot]
6ee3cae962 chore(main): release 4.9.1 (#1487)
🤖 I have created a release *beep* *boop*
---


## [4.9.1](https://github.com/unraid/api/compare/v4.9.0...v4.9.1)
(2025-07-08)


### Bug Fixes

* **HeaderOsVersion:** adjust top margin for header component
([#1485](https://github.com/unraid/api/issues/1485))
([862b54d](862b54de8c))
* sign out doesn't work
([#1486](https://github.com/unraid/api/issues/1486))
([f3671c3](f3671c3e07))

---
This PR was generated with [Release
Please](https://github.com/googleapis/release-please). See
[documentation](https://github.com/googleapis/release-please#release-please).

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-07-08 16:30:00 -04:00
Eli Bosley
f3671c3e07 fix: sign out doesn't work (#1486)
## Summary by CodeRabbit

* **Refactor**
* Improved handling of Connect account sign-in and sign-out with
persistent mutation instances for better status updates and error
reporting.

* **Chores**
* Expanded allowed command patterns in configuration for development,
build, and testing tasks.

* **Tests**
* Enhanced mutation mocks in component tests to increase test
reliability.
2025-07-08 16:28:38 -04:00
Zack Spear
862b54de8c fix(HeaderOsVersion): adjust top margin for header component (#1485)

## Summary by CodeRabbit

* **Style**
  * Increased top margin in the OS version header for improved spacing.
* Updated user profile container to use padding instead of margin for
better layout consistency.

2025-07-08 16:11:02 -04:00
github-actions[bot]
9624ca5c39 chore(main): release 4.9.0 (#1378)
🤖 I have created a release *beep* *boop*
---


## [4.9.0](https://github.com/unraid/api/compare/v4.8.0...v4.9.0)
(2025-07-08)


### Features

* add graphql resource for API plugins
([#1420](https://github.com/unraid/api/issues/1420))
([642a220](642a220c3a))
* add management page for API keys
([#1408](https://github.com/unraid/api/issues/1408))
([0788756](0788756b91))
* add rclone ([#1362](https://github.com/unraid/api/issues/1362))
([5517e75](5517e7506b))
* API key management
([#1407](https://github.com/unraid/api/issues/1407))
([d37dc3b](d37dc3bce2))
* api plugin management via CLI
([#1416](https://github.com/unraid/api/issues/1416))
([3dcbfbe](3dcbfbe489))
* build out docker components
([#1427](https://github.com/unraid/api/issues/1427))
([711cc9a](711cc9ac92))
* docker and info resolver issues
([#1423](https://github.com/unraid/api/issues/1423))
([9901039](9901039a38))
* fix shading in UPC to be less severe
([#1438](https://github.com/unraid/api/issues/1438))
([b7c2407](b7c2407840))
* info resolver cleanup
([#1425](https://github.com/unraid/api/issues/1425))
([1b279bb](1b279bbab3))
* initial codeql setup
([#1390](https://github.com/unraid/api/issues/1390))
([2ade7eb](2ade7eb527))
* initialize claude code in codebase
([#1418](https://github.com/unraid/api/issues/1418))
([b6c4ee6](b6c4ee6eb4))
* move api key fetching to use api key service
([#1439](https://github.com/unraid/api/issues/1439))
([86bea56](86bea56272))
* move to cron v4 ([#1428](https://github.com/unraid/api/issues/1428))
([b8035c2](b8035c207a))
* move to iframe for changelog
([#1388](https://github.com/unraid/api/issues/1388))
([fcd6fbc](fcd6fbcdd4))
* native slackware package
([#1381](https://github.com/unraid/api/issues/1381))
([4f63b4c](4f63b4cf3b))
* send active unraid theme to docs
([#1400](https://github.com/unraid/api/issues/1400))
([f71943b](f71943b62b))
* slightly better watch mode
([#1398](https://github.com/unraid/api/issues/1398))
([881f1e0](881f1e0960))
* upgrade nuxt-custom-elements
([#1461](https://github.com/unraid/api/issues/1461))
([345e83b](345e83bfb0))
* use bigint instead of long
([#1403](https://github.com/unraid/api/issues/1403))
([574d572](574d572d65))


### Bug Fixes

* activation indicator removed
([5edfd82](5edfd823b8))
* alignment of settings on ManagementAccess settings page
([#1421](https://github.com/unraid/api/issues/1421))
([70c790f](70c790ff89))
* allow rclone to fail to initialize
([#1453](https://github.com/unraid/api/issues/1453))
([7c6f02a](7c6f02a5cb))
* always download 7.1 versioned files for patching
([edc0d15](edc0d1578b))
* api `pnpm type-check`
([#1442](https://github.com/unraid/api/issues/1442))
([3122bdb](3122bdb953))
* **api:** connect config `email` validation
([#1454](https://github.com/unraid/api/issues/1454))
([b9a1b9b](b9a1b9b087))
* backport
unraid/webgui[#2269](https://github.com/unraid/api/issues/2269) rc.nginx
update ([#1436](https://github.com/unraid/api/issues/1436))
([a7ef06e](a7ef06ea25))
* bigint
([e54d27a](e54d27aede))
* config migration from `myservers.cfg`
([#1440](https://github.com/unraid/api/issues/1440))
([c4c9984](c4c99843c7))
* **connect:** fatal race-condition in websocket disposal
([#1462](https://github.com/unraid/api/issues/1462))
([0ec0de9](0ec0de982f))
* **connect:** mothership connection
([#1464](https://github.com/unraid/api/issues/1464))
([7be8bc8](7be8bc84d3))
* console hidden
([9b85e00](9b85e009b8))
* debounce is too long
([#1426](https://github.com/unraid/api/issues/1426))
([f12d231](f12d231e63))
* delete legacy connect keys and ensure description
([22fe91c](22fe91cd56))
* **deps:** pin dependencies
([#1465](https://github.com/unraid/api/issues/1465))
([ba75a40](ba75a409a4))
* **deps:** pin dependencies
([#1470](https://github.com/unraid/api/issues/1470))
([412b329](412b32996d))
* **deps:** storybook v9
([#1476](https://github.com/unraid/api/issues/1476))
([45bb49b](45bb49bcd6))
* **deps:** update all non-major dependencies
([#1366](https://github.com/unraid/api/issues/1366))
([291ee47](291ee475fb))
* **deps:** update all non-major dependencies
([#1379](https://github.com/unraid/api/issues/1379))
([8f70326](8f70326d0f))
* **deps:** update all non-major dependencies
([#1389](https://github.com/unraid/api/issues/1389))
([cb43f95](cb43f95233))
* **deps:** update all non-major dependencies
([#1399](https://github.com/unraid/api/issues/1399))
([68df344](68df344a4b))
* **deps:** update dependency @types/diff to v8
([#1393](https://github.com/unraid/api/issues/1393))
([00da27d](00da27d04f))
* **deps:** update dependency cache-manager to v7
([#1413](https://github.com/unraid/api/issues/1413))
([9492c2a](9492c2ae6a))
* **deps:** update dependency commander to v14
([#1394](https://github.com/unraid/api/issues/1394))
([106ea09](106ea09399))
* **deps:** update dependency diff to v8
([#1386](https://github.com/unraid/api/issues/1386))
([e580f64](e580f646a5))
* **deps:** update dependency dotenv to v17
([#1474](https://github.com/unraid/api/issues/1474))
([d613bfa](d613bfa041))
* **deps:** update dependency lucide-vue-next to ^0.509.0
([#1383](https://github.com/unraid/api/issues/1383))
([469333a](469333acd4))
* **deps:** update dependency marked to v16
([#1444](https://github.com/unraid/api/issues/1444))
([453a5b2](453a5b2c95))
* **deps:** update dependency shadcn-vue to v2
([#1302](https://github.com/unraid/api/issues/1302))
([26ecf77](26ecf779e6))
* **deps:** update dependency vue-sonner to v2
([#1401](https://github.com/unraid/api/issues/1401))
([53ca414](53ca41404f))
* disable file changes on Unraid 7.2
([#1382](https://github.com/unraid/api/issues/1382))
([02de89d](02de89d130))
* do not start API with doinst.sh
([7d88b33](7d88b3393c))
* do not uninstall fully on 7.2
([#1484](https://github.com/unraid/api/issues/1484))
([2263881](22638811a9))
* drop console with terser
([a87d455](a87d455bac))
* error logs from `cloud` query when connect is not installed
([#1450](https://github.com/unraid/api/issues/1450))
([719f460](719f460016))
* flash backup integration with Unraid Connect config
([#1448](https://github.com/unraid/api/issues/1448))
([038c582](038c582aed))
* header padding regression
([#1477](https://github.com/unraid/api/issues/1477))
([e791cc6](e791cc680d))
* incorrect state merging in redux store
([#1437](https://github.com/unraid/api/issues/1437))
([17b7428](17b7428779))
* lanip copy button not present
([#1459](https://github.com/unraid/api/issues/1459))
([a280786](a2807864ac))
* move to bigint scalar
([b625227](b625227913))
* node_modules dir removed on plugin update
([#1406](https://github.com/unraid/api/issues/1406))
([7b005cb](7b005cbbf6))
* omit Connect actions in UPC when plugin is not installed
([#1417](https://github.com/unraid/api/issues/1417))
([8c8a527](8c8a5276b4))
* parsing of `ssoEnabled` in state.php
([#1455](https://github.com/unraid/api/issues/1455))
([f542c8e](f542c8e0bd))
* pin ranges ([#1460](https://github.com/unraid/api/issues/1460))
([f88400e](f88400eea8))
* pr plugin promotion workflow
([#1456](https://github.com/unraid/api/issues/1456))
([13bd9bb](13bd9bb567))
* proper fallback if missing paths config modules
([7067e9e](7067e9e3dd))
* rc.unraid-api now cleans up older dependencies
([#1404](https://github.com/unraid/api/issues/1404))
([83076bb](83076bb940))
* remote access lifecycle during boot & shutdown
([#1422](https://github.com/unraid/api/issues/1422))
([7bc583b](7bc583b186))
* sign out correctly on error
([#1452](https://github.com/unraid/api/issues/1452))
([d08fc94](d08fc94afb))
* simplify usb listing
([#1402](https://github.com/unraid/api/issues/1402))
([5355115](5355115af2))
* theme issues when sent from graph
([#1424](https://github.com/unraid/api/issues/1424))
([75ad838](75ad8381bd))
* **ui:** notifications positioning regression
([#1445](https://github.com/unraid/api/issues/1445))
([f73e5e0](f73e5e0058))
* use some instead of every for connect detection
([9ce2fee](9ce2fee380))


### Reverts

* revert package.json dependency updates from commit 711cc9a for api and
packages/*
([94420e4](94420e4d45))

---
This PR was generated with [Release
Please](https://github.com/googleapis/release-please). See
[documentation](https://github.com/googleapis/release-please#release-please).

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-07-08 15:45:49 -04:00
Eli Bosley
22638811a9 fix: do not uninstall fully on 7.2 (#1484)
## Summary by CodeRabbit

* **New Features**
* Uninstallation process now adapts based on Unraid version: for version
7.2 or higher, users receive a notification and are prompted to reboot
to complete plugin removal.
* **Bug Fixes**
* Improved handling of plugin removal to ensure compatibility with
different Unraid versions and prevent unintended reinstalls.
2025-07-08 15:44:30 -04:00
Eli Bosley
5edfd823b8 fix: activation indicator removed 2025-07-08 15:42:41 -04:00
Eli Bosley
87fc83645f fix: monitor jobs 2025-05-27 11:25:21 -04:00
Eli Bosley
f126c9568a feat: more progress with new backup schema 2025-05-27 08:00:42 -04:00
Eli Bosley
c273a3b7e7 feat: remove stream job manager in favor of using stdin 2025-05-26 22:33:56 -04:00
Eli Bosley
92f3d6956e feat: refactor preprocessors to source 2025-05-26 20:15:13 -04:00
Eli Bosley
90ed4b9de3 feat: cleanup types 2025-05-26 19:38:24 -04:00
Eli Bosley
015c6e527b feat: substantial type cleanup 2025-05-26 19:22:12 -04:00
Eli Bosley
5fcb8da50b feat: progress on flash backup 2025-05-26 16:03:00 -04:00
Eli Bosley
5b0862dd98 feat: substantial code cleanup 2025-05-24 22:14:17 -04:00
Eli Bosley
8da7c6e586 feat: backups working 2025-05-24 19:56:53 -04:00
Eli Bosley
333093a20d feat: setup initial backup stats 2025-05-24 09:56:10 -04:00
Eli Bosley
69359902cb chore: begin setting up new views and mutations for flash backup 2025-05-24 07:42:39 -04:00
Eli Bosley
8befa23b4d fix: entropy on crypt creation 2025-05-23 20:55:41 -04:00
Eli Bosley
f0c26b777f chore: remove unused comments 2025-05-23 20:54:48 -04:00
Eli Bosley
f29d4f5318 fix: colors 2025-05-23 20:48:07 -04:00
Eli Bosley
7f9f4c68ac fix: simplify api service test 2025-05-23 20:44:30 -04:00
Eli Bosley
cebca3d6bf fix: combobox commit on close 2025-05-23 20:38:45 -04:00
Eli Bosley
25f57f90aa fix: comment cleanup 2025-05-23 20:36:22 -04:00
Eli Bosley
50b80b9c07 fix: format 2025-05-23 20:32:25 -04:00
Eli Bosley
69b8eb9060 fix: sanitize params 2025-05-23 20:32:09 -04:00
Eli Bosley
d83d36c355 fix: logging api version as well 2025-05-23 20:29:57 -04:00
Eli Bosley
7c26b01be6 fix: some redaction and fix constructor 2025-05-23 20:29:57 -04:00
Eli Bosley
1d3800c164 fix: use DTOs for rclone api service 2025-05-23 20:29:57 -04:00
Eli Bosley
9d4249950d fix: cleanup types in spec 2025-05-23 20:29:57 -04:00
Eli Bosley
5e9d09e75c fix: review feedback 2025-05-23 20:29:57 -04:00
Eli Bosley
64c71459be fix: invalid tester 2025-05-23 20:29:57 -04:00
Eli Bosley
8f8352090c fix: rclone pretry and unnecessary escapes 2025-05-23 20:29:57 -04:00
Eli Bosley
744f34fc7b fix: rclone username and password removed 2025-05-23 20:29:57 -04:00
Eli Bosley
3ffde0272c fix: remove rclone webUI 2025-05-23 20:29:57 -04:00
Eli Bosley
a5c7b9fdd3 Update unraid-ui/src/forms/renderers.ts
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
2025-05-23 20:29:57 -04:00
Eli Bosley
db9b8c12b9 feat: rclone installed into package 2025-05-23 20:29:57 -04:00
Eli Bosley
93d9530628 feat: add crypt one-click setup for remotes 2025-05-23 20:29:57 -04:00
Eli Bosley
af5ffec13d chore: refactor form label helper 2025-05-23 20:28:50 -04:00
Eli Bosley
54b0bc0837 chore: lint 2025-05-23 20:28:50 -04:00
Eli Bosley
7be58908f0 chore: unit test fixes 2025-05-23 20:28:50 -04:00
Eli Bosley
00b1c1b0c7 chore: don't disable apollo in dev mode 2025-05-23 20:28:50 -04:00
Eli Bosley
d3adbafbff chore: lint 2025-05-23 20:28:50 -04:00
Eli Bosley
dada8e63c5 chore: error wrapper generic 2025-05-23 20:28:50 -04:00
Eli Bosley
f5e4607f70 chore: more cleanup 2025-05-23 20:28:50 -04:00
Eli Bosley
68139cda2b chore: fix combobox 2025-05-23 20:28:50 -04:00
Eli Bosley
bf3b95bfe5 feat: fix storybook config 2025-05-23 20:28:50 -04:00
Eli Bosley
35a6d14367 fix: storybook 2025-05-23 20:28:50 -04:00
Eli Bosley
0be56f148d chore: renderer fixes 2025-05-23 20:28:11 -04:00
Eli Bosley
4c9e0044e5 chore: more progress on generative ui 2025-05-23 20:28:11 -04:00
Eli Bosley
242697c8d8 feat: ui working for RClone setup 2025-05-23 20:27:56 -04:00
Eli Bosley
f93c850b95 chore: rclone config almost complete 2025-05-23 20:27:56 -04:00
Eli Bosley
8df0ca58b5 chore: progress on rclone 2025-05-23 20:27:56 -04:00
Eli Bosley
d31d86dc7d feat: add rclone 2025-05-23 20:27:11 -04:00
145 changed files with 17033 additions and 2114 deletions

View File

@@ -2,7 +2,29 @@
"permissions": {
"allow": [
"Bash(rg:*)",
"Bash(find:*)"
"Bash(find:*)",
"Bash(pnpm codegen:*)",
"Bash(pnpm dev:*)",
"Bash(pnpm build:*)",
"Bash(pnpm test:*)",
"Bash(grep:*)",
"Bash(pnpm type-check:*)",
"Bash(pnpm lint:*)",
"Bash(pnpm --filter ./api lint)",
"Bash(mv:*)",
"Bash(ls:*)",
"mcp__ide__getDiagnostics",
"Bash(pnpm --filter \"*connect*\" test connect-status-writer.service.spec)",
"Bash(pnpm add:*)",
"Bash(npx tsc:*)",
"Bash(pnpm list:*)",
"Bash(rm:*)",
"Bash(pnpm --filter ./api test)",
"Bash(pnpm i:*)",
"Bash(pnpm:*)",
"Bash(corepack prepare:*)",
"Bash(nvm:*)",
"Bash(git config:*)"
]
},
"enableAllProjectMcpServers": false

View File

@@ -25,7 +25,7 @@ jobs:
- name: Setup Node.js
uses: actions/setup-node@v4
with:
-node-version: '20'
+node-version: '22'
- uses: pnpm/action-setup@v4
name: Install pnpm
@@ -33,7 +33,7 @@ jobs:
run_install: false
- name: Cache APT Packages
-uses: awalsh128/cache-apt-pkgs-action@v1.4.3
+uses: awalsh128/cache-apt-pkgs-action@v1.5.1
with:
packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
version: 1.0

View File

@@ -45,7 +45,7 @@ jobs:
node-version-file: ".nvmrc"
- name: Cache APT Packages
-uses: awalsh128/cache-apt-pkgs-action@v1.4.3
+uses: awalsh128/cache-apt-pkgs-action@v1.5.1
with:
packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
version: 1.0
@@ -190,7 +190,7 @@ jobs:
${{ runner.os }}-pnpm-store-
- name: Cache APT Packages
-uses: awalsh128/cache-apt-pkgs-action@v1.4.3
+uses: awalsh128/cache-apt-pkgs-action@v1.5.1
with:
packages: bash procps python3 libvirt-dev jq zstd git build-essential
version: 1.0
@@ -267,7 +267,7 @@ jobs:
${{ runner.os }}-pnpm-store-
- name: Cache APT Packages
-uses: awalsh128/cache-apt-pkgs-action@v1.4.3
+uses: awalsh128/cache-apt-pkgs-action@v1.5.1
with:
packages: bash procps python3 libvirt-dev jq zstd git build-essential
version: 1.0

View File

@@ -32,7 +32,9 @@ jobs:
with:
node-version: '22.17.0'
- run: |
-echo '${{ steps.release-info.outputs.body }}' >> release-notes.txt
+cat << 'EOF' > release-notes.txt
+${{ steps.release-info.outputs.body }}
+EOF
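# Editor's note: the quoted heredoc ('EOF') writes the release body verbatim,
# so quotes and backticks in release notes no longer break this step (cf.
# "fix: release note escaping" above). The ${{ ... }} expression is expanded
# by GitHub Actions before the shell runs; quoting EOF only stops the shell
# from re-interpreting the expanded text.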
- run: npm install html-escaper@2 xml2js
- name: Update Plugin Changelog
uses: actions/github-script@v7

View File

@@ -31,7 +31,7 @@ jobs:
python-version: "3.13.5"
- name: Cache APT Packages
-uses: awalsh128/cache-apt-pkgs-action@v1.4.3
+uses: awalsh128/cache-apt-pkgs-action@v1.5.1
with:
packages: libvirt-dev
version: 1.0

2
.nvmrc
View File

@@ -1 +1 @@
-22.16.0
+22.17.0

View File

@@ -1 +1 @@
{".":"4.8.0"}
{".":"4.9.5"}

View File

@@ -15,6 +15,7 @@ PATHS_ACTIVATION_BASE=./dev/activation
PATHS_PASSWD=./dev/passwd
PATHS_RCLONE_SOCKET=./dev/rclone-socket
PATHS_LOG_BASE=./dev/log # Where we store logs
PATHS_BACKUP_JOBS=./dev/api/backup
ENVIRONMENT="development"
NODE_ENV="development"
PORT="3001"
@@ -26,4 +27,4 @@ BYPASS_PERMISSION_CHECKS=false
BYPASS_CORS_CHECKS=true
CHOKIDAR_USEPOLLING=true
LOG_TRANSPORT=console
-LOG_LEVEL=trace
+LOG_LEVEL=debug # Change to trace for extremely noisy logging

View File

@@ -53,5 +53,5 @@ export default tseslint.config(eslint.configs.recommended, ...tseslint.configs.r
'eol-last': ['error', 'always'],
},
-ignores: ['src/graphql/generated/client/**/*'],
+ignores: ['src/graphql/generated/client/**/*', 'scripts/**/*'],
});

View File

@@ -1,5 +1,132 @@
# Changelog
## [4.9.5](https://github.com/unraid/api/compare/v4.9.4...v4.9.5) (2025-07-10)
### Bug Fixes
* **connect:** rm eager restart on `ERROR_RETYING` connection status ([#1502](https://github.com/unraid/api/issues/1502)) ([dd759d9](https://github.com/unraid/api/commit/dd759d9f0f841b296f8083bc67c6cd3f7a69aa5b))
## [4.9.4](https://github.com/unraid/api/compare/v4.9.3...v4.9.4) (2025-07-09)
### Bug Fixes
* backport `<unraid-modals>` upon plg install when necessary ([#1499](https://github.com/unraid/api/issues/1499)) ([33e0b1a](https://github.com/unraid/api/commit/33e0b1ab24bedb6a2c7b376ea73dbe65bc3044be))
* DefaultPageLayout patch rollback omits legacy header logo ([#1497](https://github.com/unraid/api/issues/1497)) ([ea20d1e](https://github.com/unraid/api/commit/ea20d1e2116fcafa154090fee78b42ec5d9ba584))
* event emitter setup for writing status ([#1496](https://github.com/unraid/api/issues/1496)) ([ca4e2db](https://github.com/unraid/api/commit/ca4e2db1f29126a1fa3784af563832edda64b0ca))
## [4.9.3](https://github.com/unraid/api/compare/v4.9.2...v4.9.3) (2025-07-09)
### Bug Fixes
* duplicated header logo after api stops ([#1493](https://github.com/unraid/api/issues/1493)) ([4168f43](https://github.com/unraid/api/commit/4168f43e3ecd51479bec3aae585abbe6dcd3e416))
## [4.9.2](https://github.com/unraid/api/compare/v4.9.1...v4.9.2) (2025-07-09)
### Bug Fixes
* invalid configs no longer crash API ([#1491](https://github.com/unraid/api/issues/1491)) ([6bf3f77](https://github.com/unraid/api/commit/6bf3f776380edeff5133517e6aca223556e30144))
* invalid state for unraid plugin ([#1492](https://github.com/unraid/api/issues/1492)) ([39b8f45](https://github.com/unraid/api/commit/39b8f453da23793ef51f8e7f7196370aada8c5aa))
* release note escaping ([5b6bcb6](https://github.com/unraid/api/commit/5b6bcb6043a5269bff4dc28714d787a5a3f07e22))
## [4.9.1](https://github.com/unraid/api/compare/v4.9.0...v4.9.1) (2025-07-08)
### Bug Fixes
* **HeaderOsVersion:** adjust top margin for header component ([#1485](https://github.com/unraid/api/issues/1485)) ([862b54d](https://github.com/unraid/api/commit/862b54de8cd793606f1d29e76c19d4a0e1ae172f))
* sign out doesn't work ([#1486](https://github.com/unraid/api/issues/1486)) ([f3671c3](https://github.com/unraid/api/commit/f3671c3e0750b79be1f19655a07a0e9932289b3f))
## [4.9.0](https://github.com/unraid/api/compare/v4.8.0...v4.9.0) (2025-07-08)
### Features
* add graphql resource for API plugins ([#1420](https://github.com/unraid/api/issues/1420)) ([642a220](https://github.com/unraid/api/commit/642a220c3a796829505d8449dc774968c9d5c222))
* add management page for API keys ([#1408](https://github.com/unraid/api/issues/1408)) ([0788756](https://github.com/unraid/api/commit/0788756b918a8e99be51f34bf6f96bbe5b67395a))
* add rclone ([#1362](https://github.com/unraid/api/issues/1362)) ([5517e75](https://github.com/unraid/api/commit/5517e7506b05c7bef5012bb9f8d2103e91061997))
* API key management ([#1407](https://github.com/unraid/api/issues/1407)) ([d37dc3b](https://github.com/unraid/api/commit/d37dc3bce28bad1c893ae7eff96ca5ffd9177648))
* api plugin management via CLI ([#1416](https://github.com/unraid/api/issues/1416)) ([3dcbfbe](https://github.com/unraid/api/commit/3dcbfbe48973b8047f0c6c560068808d86ac6970))
* build out docker components ([#1427](https://github.com/unraid/api/issues/1427)) ([711cc9a](https://github.com/unraid/api/commit/711cc9ac926958bcf2996455b023ad265b041530))
* docker and info resolver issues ([#1423](https://github.com/unraid/api/issues/1423)) ([9901039](https://github.com/unraid/api/commit/9901039a3863de06b520e23cb2573b610716c673))
* fix shading in UPC to be less severe ([#1438](https://github.com/unraid/api/issues/1438)) ([b7c2407](https://github.com/unraid/api/commit/b7c240784052276fc60e064bd7d64dd6e801ae90))
* info resolver cleanup ([#1425](https://github.com/unraid/api/issues/1425)) ([1b279bb](https://github.com/unraid/api/commit/1b279bbab3a51e7d032e7e3c9898feac8bfdbafa))
* initial codeql setup ([#1390](https://github.com/unraid/api/issues/1390)) ([2ade7eb](https://github.com/unraid/api/commit/2ade7eb52792ef481aaf711dc07029629ea107d9))
* initialize claude code in codebase ([#1418](https://github.com/unraid/api/issues/1418)) ([b6c4ee6](https://github.com/unraid/api/commit/b6c4ee6eb4b9ebb6d6e59a341e1f51b253578752))
* move api key fetching to use api key service ([#1439](https://github.com/unraid/api/issues/1439)) ([86bea56](https://github.com/unraid/api/commit/86bea5627270a2a18c5b7db36dd59061ab61e753))
* move to cron v4 ([#1428](https://github.com/unraid/api/issues/1428)) ([b8035c2](https://github.com/unraid/api/commit/b8035c207a6e387c7af3346593a872664f6c867b))
* move to iframe for changelog ([#1388](https://github.com/unraid/api/issues/1388)) ([fcd6fbc](https://github.com/unraid/api/commit/fcd6fbcdd48e7f224b3bd8799a668d9e01967f0c))
* native slackware package ([#1381](https://github.com/unraid/api/issues/1381)) ([4f63b4c](https://github.com/unraid/api/commit/4f63b4cf3bb9391785f07a38defe54ec39071caa))
* send active unraid theme to docs ([#1400](https://github.com/unraid/api/issues/1400)) ([f71943b](https://github.com/unraid/api/commit/f71943b62b30119e17766e56534962630f52a591))
* slightly better watch mode ([#1398](https://github.com/unraid/api/issues/1398)) ([881f1e0](https://github.com/unraid/api/commit/881f1e09607d1e4a8606f8d048636ba09d8fcac1))
* upgrade nuxt-custom-elements ([#1461](https://github.com/unraid/api/issues/1461)) ([345e83b](https://github.com/unraid/api/commit/345e83bfb0904381d784fc77b3dcd3ad7e53d898))
* use bigint instead of long ([#1403](https://github.com/unraid/api/issues/1403)) ([574d572](https://github.com/unraid/api/commit/574d572d6567c652057b29776694e86267316ca7))
### Bug Fixes
* activation indicator removed ([5edfd82](https://github.com/unraid/api/commit/5edfd823b862cfc1f864565021f12334fe9317c6))
* alignment of settings on ManagementAccess settings page ([#1421](https://github.com/unraid/api/issues/1421)) ([70c790f](https://github.com/unraid/api/commit/70c790ff89075a785d7f0623bbf3c34a3806bbdc))
* allow rclone to fail to initialize ([#1453](https://github.com/unraid/api/issues/1453)) ([7c6f02a](https://github.com/unraid/api/commit/7c6f02a5cb474fb285db294ec6f80d1c2c57e142))
* always download 7.1 versioned files for patching ([edc0d15](https://github.com/unraid/api/commit/edc0d1578b89c3b3e56e637de07137e069656fa8))
* api `pnpm type-check` ([#1442](https://github.com/unraid/api/issues/1442)) ([3122bdb](https://github.com/unraid/api/commit/3122bdb953eec58469fd9cf6f468e75621781040))
* **api:** connect config `email` validation ([#1454](https://github.com/unraid/api/issues/1454)) ([b9a1b9b](https://github.com/unraid/api/commit/b9a1b9b08746b6d4cb2128d029a3dab7cdd47677))
* backport unraid/webgui[#2269](https://github.com/unraid/api/issues/2269) rc.nginx update ([#1436](https://github.com/unraid/api/issues/1436)) ([a7ef06e](https://github.com/unraid/api/commit/a7ef06ea252545cef084e21cea741a8ec866e7cc))
* bigint ([e54d27a](https://github.com/unraid/api/commit/e54d27aede1b1e784971468777c5e65cde66f2ac))
* config migration from `myservers.cfg` ([#1440](https://github.com/unraid/api/issues/1440)) ([c4c9984](https://github.com/unraid/api/commit/c4c99843c7104414120bffc5dd5ed78ab6c8ba02))
* **connect:** fatal race-condition in websocket disposal ([#1462](https://github.com/unraid/api/issues/1462)) ([0ec0de9](https://github.com/unraid/api/commit/0ec0de982f017b61a145c7a4176718b484834f41))
* **connect:** mothership connection ([#1464](https://github.com/unraid/api/issues/1464)) ([7be8bc8](https://github.com/unraid/api/commit/7be8bc84d3831f9cea7ff62d0964612ad366a976))
* console hidden ([9b85e00](https://github.com/unraid/api/commit/9b85e009b833706294a841a54498e45a8e0204ed))
* debounce is too long ([#1426](https://github.com/unraid/api/issues/1426)) ([f12d231](https://github.com/unraid/api/commit/f12d231e6376d0f253cee67b7ed690c432c63ec5))
* delete legacy connect keys and ensure description ([22fe91c](https://github.com/unraid/api/commit/22fe91cd561e88aa24e8f8cfa5a6143e7644e4e0))
* **deps:** pin dependencies ([#1465](https://github.com/unraid/api/issues/1465)) ([ba75a40](https://github.com/unraid/api/commit/ba75a409a4d3e820308b78fd5a5380021d3757b0))
* **deps:** pin dependencies ([#1470](https://github.com/unraid/api/issues/1470)) ([412b329](https://github.com/unraid/api/commit/412b32996d9c8352c25309cc0d549a57468d0fb5))
* **deps:** storybook v9 ([#1476](https://github.com/unraid/api/issues/1476)) ([45bb49b](https://github.com/unraid/api/commit/45bb49bcd60a9753be492203111e489fd37c1a5f))
* **deps:** update all non-major dependencies ([#1366](https://github.com/unraid/api/issues/1366)) ([291ee47](https://github.com/unraid/api/commit/291ee475fb9ef44f6da7b76a9eb11b7dd29a5d13))
* **deps:** update all non-major dependencies ([#1379](https://github.com/unraid/api/issues/1379)) ([8f70326](https://github.com/unraid/api/commit/8f70326d0fe3e4c3bcd3e8e4e6566766f1c05eb7))
* **deps:** update all non-major dependencies ([#1389](https://github.com/unraid/api/issues/1389)) ([cb43f95](https://github.com/unraid/api/commit/cb43f95233590888a8e20a130e62cadc176c6793))
* **deps:** update all non-major dependencies ([#1399](https://github.com/unraid/api/issues/1399)) ([68df344](https://github.com/unraid/api/commit/68df344a4b412227cffa96867f086177b251f028))
* **deps:** update dependency @types/diff to v8 ([#1393](https://github.com/unraid/api/issues/1393)) ([00da27d](https://github.com/unraid/api/commit/00da27d04f2ee2ca8b8b9cdcc6ea3c490c02a3a4))
* **deps:** update dependency cache-manager to v7 ([#1413](https://github.com/unraid/api/issues/1413)) ([9492c2a](https://github.com/unraid/api/commit/9492c2ae6a0086d14e73d280c55746206b73a7b0))
* **deps:** update dependency commander to v14 ([#1394](https://github.com/unraid/api/issues/1394)) ([106ea09](https://github.com/unraid/api/commit/106ea093996f2d0c71c1511bc009ecc9a6be91ec))
* **deps:** update dependency diff to v8 ([#1386](https://github.com/unraid/api/issues/1386)) ([e580f64](https://github.com/unraid/api/commit/e580f646a52b8bda605132cf44ec58137e08dd42))
* **deps:** update dependency dotenv to v17 ([#1474](https://github.com/unraid/api/issues/1474)) ([d613bfa](https://github.com/unraid/api/commit/d613bfa0410e7ef8451fc8ea20e57a7db67f7994))
* **deps:** update dependency lucide-vue-next to ^0.509.0 ([#1383](https://github.com/unraid/api/issues/1383)) ([469333a](https://github.com/unraid/api/commit/469333acd4a0cbeecc9e9cbadb2884289d83aee3))
* **deps:** update dependency marked to v16 ([#1444](https://github.com/unraid/api/issues/1444)) ([453a5b2](https://github.com/unraid/api/commit/453a5b2c9591f755ce07548a9996d7a6cf0925c4))
* **deps:** update dependency shadcn-vue to v2 ([#1302](https://github.com/unraid/api/issues/1302)) ([26ecf77](https://github.com/unraid/api/commit/26ecf779e675d0bc533d61e045325ab062effcbf))
* **deps:** update dependency vue-sonner to v2 ([#1401](https://github.com/unraid/api/issues/1401)) ([53ca414](https://github.com/unraid/api/commit/53ca41404f13c057c340dcf9010af72c3365e499))
* disable file changes on Unraid 7.2 ([#1382](https://github.com/unraid/api/issues/1382)) ([02de89d](https://github.com/unraid/api/commit/02de89d1309f67e4b6d4f8de5f66815ee4d2464c))
* do not start API with doinst.sh ([7d88b33](https://github.com/unraid/api/commit/7d88b3393cbd8ab1e93a86dfa1b7b74cc97255cc))
* do not uninstall fully on 7.2 ([#1484](https://github.com/unraid/api/issues/1484)) ([2263881](https://github.com/unraid/api/commit/22638811a9fdb524420b1347ac49cfaa51bbecb5))
* drop console with terser ([a87d455](https://github.com/unraid/api/commit/a87d455bace04aab9d7fa0e63cb61d26ef9b3b72))
* error logs from `cloud` query when connect is not installed ([#1450](https://github.com/unraid/api/issues/1450)) ([719f460](https://github.com/unraid/api/commit/719f460016d769255582742d7d71ca97d132022b))
* flash backup integration with Unraid Connect config ([#1448](https://github.com/unraid/api/issues/1448)) ([038c582](https://github.com/unraid/api/commit/038c582aed5f5efaea3583372778b9baa318e1ea))
* header padding regression ([#1477](https://github.com/unraid/api/issues/1477)) ([e791cc6](https://github.com/unraid/api/commit/e791cc680de9c40378043348ddca70902da6d250))
* incorrect state merging in redux store ([#1437](https://github.com/unraid/api/issues/1437)) ([17b7428](https://github.com/unraid/api/commit/17b74287796e6feb75466033e279dc3bcf57f1e6))
* lanip copy button not present ([#1459](https://github.com/unraid/api/issues/1459)) ([a280786](https://github.com/unraid/api/commit/a2807864acef742e454d87bb093ee91806e527e5))
* move to bigint scalar ([b625227](https://github.com/unraid/api/commit/b625227913e80e4731a13b54b525ec7385918c51))
* node_modules dir removed on plugin update ([#1406](https://github.com/unraid/api/issues/1406)) ([7b005cb](https://github.com/unraid/api/commit/7b005cbbf682a1336641f5fc85022e9d651569d0))
* omit Connect actions in UPC when plugin is not installed ([#1417](https://github.com/unraid/api/issues/1417)) ([8c8a527](https://github.com/unraid/api/commit/8c8a5276b49833c08bca133e374e1e66273b41aa))
* parsing of `ssoEnabled` in state.php ([#1455](https://github.com/unraid/api/issues/1455)) ([f542c8e](https://github.com/unraid/api/commit/f542c8e0bd9596d9d3abf75b58b97d95fb033215))
* pin ranges ([#1460](https://github.com/unraid/api/issues/1460)) ([f88400e](https://github.com/unraid/api/commit/f88400eea820ac80c867fdb63cd503ed91493146))
* pr plugin promotion workflow ([#1456](https://github.com/unraid/api/issues/1456)) ([13bd9bb](https://github.com/unraid/api/commit/13bd9bb5670bb96b158068114d62572d88c7cae9))
* proper fallback if missing paths config modules ([7067e9e](https://github.com/unraid/api/commit/7067e9e3dd3966309013b52c90090cc82de4e4fb))
* rc.unraid-api now cleans up older dependencies ([#1404](https://github.com/unraid/api/issues/1404)) ([83076bb](https://github.com/unraid/api/commit/83076bb94088095de8b1a332a50bbef91421f0c1))
* remote access lifecycle during boot & shutdown ([#1422](https://github.com/unraid/api/issues/1422)) ([7bc583b](https://github.com/unraid/api/commit/7bc583b18621c8140232772ca36c6d9b8d8a9cd7))
* sign out correctly on error ([#1452](https://github.com/unraid/api/issues/1452)) ([d08fc94](https://github.com/unraid/api/commit/d08fc94afb94e386907da44402ee5a24cfb3d00a))
* simplify usb listing ([#1402](https://github.com/unraid/api/issues/1402)) ([5355115](https://github.com/unraid/api/commit/5355115af2f4122af9afa3f63ed8f830b33cbf5c))
* theme issues when sent from graph ([#1424](https://github.com/unraid/api/issues/1424)) ([75ad838](https://github.com/unraid/api/commit/75ad8381bd4f4045ab1d3aa84e08ecddfba27617))
* **ui:** notifications positioning regression ([#1445](https://github.com/unraid/api/issues/1445)) ([f73e5e0](https://github.com/unraid/api/commit/f73e5e0058fcc3bedebfbe7380ffcb44aea981b8))
* use some instead of every for connect detection ([9ce2fee](https://github.com/unraid/api/commit/9ce2fee380c4db1395f5d4df7f16ae6c57d1a748))
### Reverts
* revert package.json dependency updates from commit 711cc9a for api and packages/* ([94420e4](https://github.com/unraid/api/commit/94420e4d45735b8def3915b5789c15c1c3121f1e))
## [4.8.0](https://github.com/unraid/api/compare/v4.7.0...v4.8.0) (2025-05-01)

View File

@@ -0,0 +1,25 @@
[
{
"id": "a68667b6-f4ef-4c47-aec3-d9886be78487",
"name": "Test",
"sourceType": "RAW",
"destinationType": "RCLONE",
"schedule": "0 2 * * *",
"enabled": true,
"sourceConfig": {
"label": "Raw file backup",
"sourcePath": "/Users/elibosley/Desktop",
"excludePatterns": [],
"includePatterns": []
},
"destinationConfig": {
"type": "RCLONE",
"remoteName": "google_drives",
"destinationPath": "desktop"
},
"createdAt": "2025-05-27T15:02:31.655Z",
"updatedAt": "2025-05-27T15:11:40.547Z",
"lastRunAt": "2025-05-27T15:07:37.139Z",
"lastRunStatus": "Failed: RClone group backup-job_1748358397105_sbo5j322k failed or timed out."
}
]

View File

@@ -1,10 +1,12 @@
{
"version": "4.8.0",
"version": "4.9.5",
"extraOrigins": [
"https://google.com",
"https://test.com"
],
"sandbox": true,
"ssoSubIds": [],
"plugins": ["unraid-api-plugin-connect"]
"plugins": [
"unraid-api-plugin-connect"
]
}

View File

@@ -1,5 +1,5 @@
[api]
version="4.4.1"
version="4.8.0"
extraOrigins="https://google.com,https://test.com"
[local]
sandbox="yes"

View File

@@ -598,6 +598,7 @@ enum Resource {
ACTIVATION_CODE
API_KEY
ARRAY
BACKUP
CLOUD
CONFIG
CONNECT
@@ -653,6 +654,63 @@ type ApiKeyWithSecret implements Node {
key: String!
}
type JobStatus implements Node {
id: PrefixedID!
"""External job ID from the job execution system"""
externalJobId: String!
name: String!
status: BackupJobStatus!
"""Progress percentage (0-100)"""
progress: Int!
message: String
error: String
startTime: DateTime!
endTime: DateTime
"""Bytes transferred"""
bytesTransferred: Int
"""Total bytes to transfer"""
totalBytes: Int
"""Transfer speed in bytes per second"""
speed: Int
"""Elapsed time in seconds"""
elapsedTime: Int
"""Estimated time to completion in seconds"""
eta: Int
"""Human-readable bytes transferred"""
formattedBytesTransferred: String
"""Human-readable transfer speed"""
formattedSpeed: String
"""Human-readable elapsed time"""
formattedElapsedTime: String
"""Human-readable ETA"""
formattedEta: String
}
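# Illustrative usage (not part of this schema diff): a client polling a
# backup job might select, e.g.:
#
#   query BackupJobProgress {
#     backupJobStatus(id: "...") {  # hypothetical root field; not shown in this excerpt
#       name
#       status
#       progress
#       formattedEta
#     }
#   }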
"""Status of a backup job"""
enum BackupJobStatus {
QUEUED
RUNNING
COMPLETED
FAILED
CANCELLED
}
"""
A date-time string at UTC, such as 2019-12-03T09:54:33Z, compliant with the date-time format.
"""
scalar DateTime
type RCloneDrive {
"""Provider name"""
name: String!
@@ -693,6 +751,98 @@ type RCloneRemote {
config: JSON!
}
type RCloneJobStats {
"""Bytes transferred"""
bytes: Float
"""Transfer speed in bytes/sec"""
speed: Float
"""Estimated time to completion in seconds"""
eta: Float
"""Elapsed time in seconds"""
elapsedTime: Float
"""Progress percentage (0-100)"""
percentage: Float
"""Number of checks completed"""
checks: Float
"""Number of deletes completed"""
deletes: Float
"""Number of errors encountered"""
errors: Float
"""Whether a fatal error occurred"""
fatalError: Boolean
"""Last error message"""
lastError: String
"""Number of renames completed"""
renames: Float
"""Whether there is a retry error"""
retryError: Boolean
"""Number of server-side copies"""
serverSideCopies: Float
"""Bytes in server-side copies"""
serverSideCopyBytes: Float
"""Number of server-side moves"""
serverSideMoves: Float
"""Bytes in server-side moves"""
serverSideMoveBytes: Float
"""Total bytes to transfer"""
totalBytes: Float
"""Total checks to perform"""
totalChecks: Float
"""Total transfers to perform"""
totalTransfers: Float
"""Time spent transferring in seconds"""
transferTime: Float
"""Number of transfers completed"""
transfers: Float
"""Currently transferring files"""
transferring: JSON
"""Currently checking files"""
checking: JSON
"""Human-readable bytes transferred"""
formattedBytes: String
"""Human-readable transfer speed"""
formattedSpeed: String
"""Human-readable elapsed time"""
formattedElapsedTime: String
"""Human-readable ETA"""
formattedEta: String
"""Calculated percentage (fallback when percentage is null)"""
calculatedPercentage: Float
"""Whether the job is actively running"""
isActivelyRunning: Boolean
"""Whether the job is completed"""
isCompleted: Boolean
}
type ArrayMutations {
"""Set array state"""
setState(input: ArrayStateInput!): UnraidArray!
@@ -764,6 +914,186 @@ type VmMutations {
reset(id: PrefixedID!): Boolean!
}
"""Backup related mutations"""
type BackupMutations {
"""Create a new backup job configuration"""
createBackupJobConfig(input: CreateBackupJobConfigInput!): BackupJobConfig!
"""Update a backup job configuration"""
updateBackupJobConfig(id: PrefixedID!, input: UpdateBackupJobConfigInput!): BackupJobConfig
"""Delete a backup job configuration"""
deleteBackupJobConfig(id: PrefixedID!): Boolean!
"""Initiates a backup using a configured remote."""
initiateBackup(input: InitiateBackupInput!): BackupStatus!
"""Toggle a backup job configuration enabled/disabled"""
toggleJobConfig(id: PrefixedID!): BackupJobConfig
"""Manually trigger a backup job using existing configuration"""
triggerJob(id: PrefixedID!): BackupStatus!
"""Stop all running backup jobs"""
stopAllBackupJobs: BackupStatus!
"""Stop a specific backup job"""
stopBackupJob(id: PrefixedID!): BackupStatus!
"""Forget all finished backup jobs to clean up the job list"""
forgetFinishedBackupJobs: BackupStatus!
}
input CreateBackupJobConfigInput {
name: String!
schedule: String
enabled: Boolean! = true
"""Source configuration for this backup job"""
sourceConfig: SourceConfigInput
"""Destination configuration for this backup job"""
destinationConfig: DestinationConfigInput
}
input SourceConfigInput {
type: SourceType!
"""Timeout for backup operation in seconds"""
timeout: Float! = 3600
"""Whether to cleanup on failure"""
cleanupOnFailure: Boolean! = true
zfsConfig: ZfsPreprocessConfigInput
flashConfig: FlashPreprocessConfigInput
scriptConfig: ScriptPreprocessConfigInput
rawConfig: RawBackupConfigInput
}
"""
Type of backup to perform (ZFS snapshot, Flash backup, Custom script, or Raw file backup)
"""
enum SourceType {
ZFS
FLASH
SCRIPT
RAW
}
input ZfsPreprocessConfigInput {
"""Human-readable label for this source configuration"""
label: String
"""ZFS pool name"""
poolName: String!
"""Dataset name within the pool"""
datasetName: String!
"""Snapshot name prefix"""
snapshotPrefix: String
"""Whether to cleanup snapshots after backup"""
cleanupSnapshots: Boolean! = true
"""Number of snapshots to retain"""
retainSnapshots: Float
}
input FlashPreprocessConfigInput {
"""Human-readable label for this source configuration"""
label: String
"""Flash drive mount path"""
flashPath: String! = "/boot"
"""Whether to include git history"""
includeGitHistory: Boolean! = true
"""Additional paths to include in backup"""
additionalPaths: [String!]
}
input ScriptPreprocessConfigInput {
"""Human-readable label for this source configuration"""
label: String
"""Path to the script file"""
scriptPath: String!
"""Arguments to pass to the script"""
scriptArgs: [String!]
"""Working directory for script execution"""
workingDirectory: String
"""Environment variables for script execution"""
environment: JSON
"""Output file path where script should write data"""
outputPath: String!
}
input RawBackupConfigInput {
"""Human-readable label for this source configuration"""
label: String
"""Source path to backup"""
sourcePath: String!
"""File patterns to exclude from backup"""
excludePatterns: [String!]
"""File patterns to include in backup"""
includePatterns: [String!]
}
input DestinationConfigInput {
type: DestinationType!
rcloneConfig: RcloneDestinationConfigInput
}
enum DestinationType {
RCLONE
}
input RcloneDestinationConfigInput {
remoteName: String!
destinationPath: String!
rcloneOptions: JSON
}
input UpdateBackupJobConfigInput {
name: String
schedule: String
enabled: Boolean
"""Source configuration for this backup job"""
sourceConfig: SourceConfigInput
"""Destination configuration for this backup job"""
destinationConfig: DestinationConfigInput
lastRunStatus: String
lastRunAt: String
currentJobId: String
}
input InitiateBackupInput {
"""The name of the remote configuration to use for the backup."""
remoteName: String!
"""Source path to backup."""
sourcePath: String!
"""Destination path on the remote."""
destinationPath: String!
"""
Additional options for the backup operation, such as --dry-run or --transfers.
"""
options: JSON
}
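
Tying the mutation and input types together: creating a nightly flash backup against an rclone remote might look like the following (a sketch; the remote name, paths, and schedule are placeholders):

// Sketch: create a scheduled flash backup job via BackupMutations.
const CREATE_JOB = `
  mutation {
    backup {
      createBackupJobConfig(input: {
        name: "Nightly flash backup"
        schedule: "0 2 * * *"
        enabled: true
        sourceConfig: { type: FLASH, flashConfig: { flashPath: "/boot" } }
        destinationConfig: {
          type: RCLONE
          rcloneConfig: { remoteName: "my-remote", destinationPath: "backups/flash" }
        }
      }) { id name schedule enabled }
    }
  }`;

Note that timeout, cleanupOnFailure, and includeGitHistory can be omitted because the input types above declare defaults for them.
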
"""API Key related mutations"""
type ApiKeyMutations {
"""Create an API key"""
@@ -886,10 +1216,125 @@ type ParityCheck {
running: Boolean
}
type FlashPreprocessConfig {
label: String!
flashPath: String!
includeGitHistory: Boolean!
additionalPaths: [String!]
}
type RawBackupConfig {
label: String!
sourcePath: String!
excludePatterns: [String!]
includePatterns: [String!]
}
type ScriptPreprocessConfig {
label: String!
scriptPath: String!
scriptArgs: [String!]
workingDirectory: String
environment: JSON
outputPath: String!
}
type ZfsPreprocessConfig {
label: String!
poolName: String!
datasetName: String!
snapshotPrefix: String
cleanupSnapshots: Boolean!
retainSnapshots: Float
}
type Backup implements Node {
id: PrefixedID!
jobs: [JobStatus!]!
configs: [BackupJobConfig!]!
"""Get the status for the backup service"""
status: BackupStatus!
}
type BackupStatus {
"""Status message indicating the outcome of the backup initiation."""
status: String!
"""Job ID if available, can be used to check job status."""
jobId: String
}
type BackupJobConfig implements Node {
id: PrefixedID!
"""Human-readable name for this backup job"""
name: String!
"""Type of the backup source"""
sourceType: SourceType!
"""Type of the backup destination"""
destinationType: DestinationType!
"""Cron schedule expression (e.g., "0 2 * * *" for daily at 2AM)"""
schedule: String!
"""Whether this backup job is enabled"""
enabled: Boolean!
"""Source configuration for this backup job"""
sourceConfig: SourceConfigUnion!
"""Destination configuration for this backup job"""
destinationConfig: DestinationConfigUnion!
"""When this config was created"""
createdAt: DateTimeISO!
"""When this config was last updated"""
updatedAt: DateTimeISO!
"""Last time this job ran"""
lastRunAt: DateTimeISO
"""Status of last run"""
lastRunStatus: String
"""Current running job ID for this config"""
currentJobId: String
"""Get the current running job for this backup config"""
currentJob: JobStatus
}
union SourceConfigUnion = ZfsPreprocessConfig | FlashPreprocessConfig | ScriptPreprocessConfig | RawBackupConfig
union DestinationConfigUnion = RcloneDestinationConfig
type RcloneDestinationConfig {
type: String!
"""Remote name from rclone config"""
remoteName: String!
"""Destination path on the remote"""
destinationPath: String!
"""RClone options (e.g., --transfers, --checkers)"""
rcloneOptions: JSON
}
"""
A date-time string at UTC, such as 2019-12-03T09:54:33Z, compliant with the date-time format.
A date-time string at UTC, such as 2007-12-03T10:15:30Z, compliant with the `date-time` format outlined in section 5.6 of the RFC 3339 profile of the ISO 8601 standard for representation of dates and times using the Gregorian calendar.This scalar is serialized to a string in ISO 8601 format and parsed from a string in ISO 8601 format.
"""
scalar DateTime
scalar DateTimeISO
type BackupJobConfigForm {
id: PrefixedID!
dataSchema: JSON!
uiSchema: JSON!
}
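
Since sourceConfig is a union, clients read it back with inline fragments. A sketch against the backupJobConfig query defined above:

// Sketch: resolve SourceConfigUnion members when reading a config.
const CONFIG_QUERY = `
  query Config($id: PrefixedID!) {
    backupJobConfig(id: $id) {
      name
      sourceType
      sourceConfig {
        ... on FlashPreprocessConfig { flashPath includeGitHistory }
        ... on ZfsPreprocessConfig { poolName datasetName }
        ... on ScriptPreprocessConfig { scriptPath outputPath }
        ... on RawBackupConfig { sourcePath }
      }
    }
  }`;
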
type Config implements Node {
id: PrefixedID!
@@ -1248,14 +1693,6 @@ type Docker implements Node {
networks(skipCache: Boolean! = false): [DockerNetwork!]!
}
type FlashBackupStatus {
"""Status message indicating the outcome of the backup initiation."""
status: String!
"""Job ID if available, can be used to check job status."""
jobId: String
}
type Flash implements Node {
id: PrefixedID!
guid: String!
@@ -1658,13 +2095,27 @@ type Query {
vms: Vms!
parityHistory: [ParityCheck!]!
array: UnraidArray!
"""Get backup service information"""
backup: Backup!
"""Get a specific backup job configuration"""
backupJobConfig(id: PrefixedID!): BackupJobConfig
"""Get status of a specific backup job"""
backupJob(id: PrefixedID!): JobStatus
"""Get the JSON schema for backup job configuration form"""
backupJobConfigForm(input: BackupJobConfigFormInput): BackupJobConfigForm!
backupJobStatus(jobId: PrefixedID!): JobStatus
allBackupJobStatuses: [JobStatus!]!
rclone: RCloneBackupSettings!
customization: Customization
publicPartnerInfo: PublicPartnerInfo
publicTheme: Theme!
docker: Docker!
disks: [Disk!]!
disk(id: PrefixedID!): Disk!
rclone: RCloneBackupSettings!
settings: Settings!
isSSOEnabled: Boolean!
@@ -1676,6 +2127,10 @@ type Query {
cloud: Cloud!
}
input BackupJobConfigFormInput {
showAdvanced: Boolean! = false
}
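
The form query pairs a data schema with a UI schema so the frontend can render the config form dynamically. A sketch of requesting the advanced variant:

// Sketch: fetch the generated form schemas for the backup config UI.
const FORM_QUERY = `
  query {
    backupJobConfigForm(input: { showAdvanced: true }) {
      id
      dataSchema
      uiSchema
    }
  }`;
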
type Mutation {
"""Creates a new notification record"""
createNotification(input: NotificationData!): Notification!
@@ -1699,12 +2154,10 @@ type Mutation {
array: ArrayMutations!
docker: DockerMutations!
vm: VmMutations!
backup: BackupMutations!
parityCheck: ParityCheckMutations!
apiKey: ApiKeyMutations!
rclone: RCloneMutations!
"""Initiates a flash drive backup using a configured remote."""
initiateFlashBackup(input: InitiateFlashBackupInput!): FlashBackupStatus!
updateSettings(input: JSON!): UpdateSettingsResponse!
"""
@@ -1731,22 +2184,6 @@ input NotificationData {
link: String
}
input InitiateFlashBackupInput {
"""The name of the remote configuration to use for the backup."""
remoteName: String!
"""Source path to backup (typically the flash drive)."""
sourcePath: String!
"""Destination path on the remote."""
destinationPath: String!
"""
Additional options for the backup operation, such as --dry-run or --transfers.
"""
options: JSON
}
input PluginManagementInput {
"""Array of plugin package names to add or remove"""
names: [String!]!

View File

@@ -1,6 +1,6 @@
{
"name": "@unraid/api",
"version": "4.8.0",
"version": "4.9.5",
"main": "src/cli/index.ts",
"type": "module",
"corepack": {
@@ -10,7 +10,7 @@
"author": "Lime Technology, Inc. <unraid.net>",
"license": "GPL-2.0-or-later",
"engines": {
"pnpm": "10.12.4"
"pnpm": "10.13.1"
},
"scripts": {
"// Development": "",
@@ -57,7 +57,7 @@
"@as-integrations/fastify": "2.1.1",
"@fastify/cookie": "11.0.2",
"@fastify/helmet": "13.0.1",
"@graphql-codegen/client-preset": "4.8.2",
"@graphql-codegen/client-preset": "4.8.3",
"@graphql-tools/load-files": "7.0.1",
"@graphql-tools/merge": "9.0.24",
"@graphql-tools/schema": "10.0.23",
@@ -82,7 +82,7 @@
"accesscontrol": "2.2.1",
"bycontract": "2.0.11",
"bytes": "3.1.2",
"cache-manager": "7.0.0",
"cache-manager": "7.0.1",
"cacheable-lookup": "7.0.0",
"camelcase-keys": "9.1.3",
"casbin": "5.38.0",
@@ -94,11 +94,11 @@
"command-exists": "1.2.9",
"convert": "5.12.0",
"cookie": "1.0.2",
"cron": "4.3.1",
"cron": "4.3.0",
"cross-fetch": "4.1.0",
"diff": "8.0.2",
"dockerode": "4.0.7",
"dotenv": "17.1.0",
"dotenv": "17.2.0",
"execa": "9.6.0",
"exit-hook": "4.0.0",
"fastify": "5.4.0",
@@ -138,11 +138,11 @@
"rxjs": "7.8.2",
"semver": "7.7.2",
"strftime": "0.10.3",
"systeminformation": "5.27.6",
"systeminformation": "5.27.7",
"uuid": "11.1.0",
"ws": "8.18.2",
"ws": "8.18.3",
"zen-observable-ts": "1.1.0",
"zod": "3.25.67"
"zod": "3.25.76"
},
"peerDependencies": {
"unraid-api-plugin-connect": "workspace:*"
@@ -153,35 +153,35 @@
}
},
"devDependencies": {
"@eslint/js": "9.29.0",
"@eslint/js": "9.30.1",
"@graphql-codegen/add": "5.0.3",
"@graphql-codegen/cli": "5.0.7",
"@graphql-codegen/fragment-matcher": "5.1.0",
"@graphql-codegen/import-types-preset": "3.0.1",
"@graphql-codegen/typed-document-node": "5.1.1",
"@graphql-codegen/typed-document-node": "5.1.2",
"@graphql-codegen/typescript": "4.1.6",
"@graphql-codegen/typescript-operations": "4.6.1",
"@graphql-codegen/typescript-resolvers": "4.5.1",
"@graphql-typed-document-node/core": "3.2.0",
"@ianvs/prettier-plugin-sort-imports": "4.4.2",
"@ianvs/prettier-plugin-sort-imports": "4.5.1",
"@nestjs/testing": "11.1.3",
"@originjs/vite-plugin-commonjs": "1.0.3",
"@rollup/plugin-node-resolve": "16.0.1",
"@swc/core": "1.12.4",
"@swc/core": "1.12.11",
"@types/async-exit-hook": "2.0.2",
"@types/bytes": "3.1.5",
"@types/cli-table": "0.3.4",
"@types/command-exists": "1.2.3",
"@types/cors": "2.8.19",
"@types/dockerode": "3.3.41",
"@types/dockerode": "3.3.42",
"@types/graphql-fields": "1.3.9",
"@types/graphql-type-uuid": "0.2.6",
"@types/ini": "4.1.1",
"@types/ip": "1.1.3",
"@types/lodash": "4.17.18",
"@types/lodash": "4.17.20",
"@types/lodash-es": "4.17.12",
"@types/mustache": "4.2.6",
"@types/node": "22.15.32",
"@types/node": "22.16.3",
"@types/pify": "6.1.0",
"@types/semver": "7.7.0",
"@types/sendmail": "1.4.7",
@@ -192,28 +192,28 @@
"@types/wtfnode": "0.7.3",
"@vitest/coverage-v8": "3.2.4",
"@vitest/ui": "3.2.4",
"commit-and-tag-version": "9.6.0",
"cz-conventional-changelog": "3.3.0",
"eslint": "9.29.0",
"eslint-plugin-import": "2.31.0",
"eslint-plugin-n": "17.20.0",
"eslint": "9.30.1",
"eslint-plugin-import": "2.32.0",
"eslint-plugin-n": "17.21.0",
"eslint-plugin-no-relative-import-paths": "1.6.1",
"eslint-plugin-prettier": "5.5.0",
"eslint-plugin-prettier": "5.5.1",
"graphql-codegen-typescript-validation-schema": "0.17.1",
"jiti": "2.4.2",
"nodemon": "3.1.10",
"prettier": "3.5.3",
"prettier": "3.6.2",
"rollup-plugin-node-externals": "8.0.1",
"commit-and-tag-version": "9.5.0",
"tsx": "4.20.3",
"type-fest": "4.41.0",
"typescript": "5.8.3",
"typescript-eslint": "8.34.1",
"typescript-eslint": "8.36.0",
"unplugin-swc": "1.5.5",
"vite": "7.0.3",
"vite": "7.0.4",
"vite-plugin-node": "7.0.0",
"vite-tsconfig-paths": "5.1.4",
"vitest": "3.2.4",
"zx": "8.5.5"
"zx": "8.6.2"
},
"overrides": {
"eslint": {
@@ -225,8 +225,9 @@
"nest-authz": {
"@nestjs/common": "$@nestjs/common",
"@nestjs/core": "$@nestjs/core"
}
},
"cron": "4.3.1"
},
"private": true,
"packageManager": "pnpm@10.12.4"
"packageManager": "pnpm@10.13.1"
}

View File

@@ -11,6 +11,7 @@ import {
RCloneStartBackupInput,
UpdateRCloneRemoteDto,
} from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import { FormatService } from '@app/unraid-api/utils/format.service.js';
vi.mock('got');
vi.mock('execa');
@@ -55,6 +56,8 @@ describe('RCloneApiService', () => {
let mockExeca: any;
let mockPRetry: any;
let mockExistsSync: any;
let mockFormatService: FormatService;
let mockCacheManager: any;
beforeEach(async () => {
vi.clearAllMocks();
@@ -69,18 +72,67 @@ describe('RCloneApiService', () => {
mockPRetry = vi.mocked(pRetry.default);
mockExistsSync = vi.mocked(existsSync);
mockGot.post = vi.fn().mockResolvedValue({ body: {} });
mockExeca.mockReturnValue({
on: vi.fn(),
kill: vi.fn(),
killed: false,
pid: 12345,
} as any);
mockGot.post = vi.fn().mockImplementation((url: string) => {
// Mock the core/pid call to indicate socket is running
if (url.includes('core/pid')) {
return Promise.resolve({ body: { pid: 12345 } });
}
return Promise.resolve({ body: {} });
});
// Mock execa to return a resolved promise for rclone version check
mockExeca.mockImplementation((cmd: string, args: string[]) => {
if (cmd === 'rclone' && args[0] === 'version') {
return Promise.resolve({ stdout: 'rclone v1.67.0', stderr: '', exitCode: 0 } as any);
}
return {
on: vi.fn(),
kill: vi.fn(),
killed: false,
pid: 12345,
} as any;
});
mockPRetry.mockResolvedValue(undefined);
mockExistsSync.mockReturnValue(false);
// Mock socket exists
mockExistsSync.mockReturnValue(true);
service = new RCloneApiService();
await service.onModuleInit();
mockFormatService = {
formatBytes: vi.fn(),
formatDuration: vi.fn(),
} as any;
// Mock RCloneStatusService
const mockStatusService = {
enhanceStatsWithFormattedFields: vi.fn(),
transformStatsToJob: vi.fn(),
calculateCombinedStats: vi.fn(),
parseActiveJobs: vi.fn(),
parseBackupStatus: vi.fn(),
parseJobWithStats: vi.fn(),
parseAllJobsWithStats: vi.fn(),
parseJobsWithStats: vi.fn(),
getBackupStatus: vi.fn(),
} as any;
// Mock StreamingJobManager
const mockStreamingJobManager = {
startJob: vi.fn(),
stopJob: vi.fn(),
getJobStatus: vi.fn(),
getAllJobs: vi.fn(),
} as any;
// Mock cache manager
mockCacheManager = {
get: vi.fn().mockResolvedValue(null),
set: vi.fn().mockResolvedValue(undefined),
del: vi.fn().mockResolvedValue(undefined),
};
service = new RCloneApiService(mockStatusService);
// Mock the service as initialized without actually running onModuleInit
// to avoid the initialization API calls
(service as any).initialized = true;
(service as any).rcloneBaseUrl = 'http://unix:/tmp/rclone.sock:';
});
describe('getProviders', () => {
@@ -248,6 +300,9 @@ describe('RCloneApiService', () => {
options: { delete_on: 'dst' },
};
const mockResponse = { jobid: 'job-123' };
// Clear previous mock calls and set up fresh mock
mockGot.post.mockClear();
mockGot.post.mockResolvedValue({ body: mockResponse });
const result = await service.startBackup(input);
@@ -256,11 +311,11 @@ describe('RCloneApiService', () => {
expect(mockGot.post).toHaveBeenCalledWith(
'http://unix:/tmp/rclone.sock:/sync/copy',
expect.objectContaining({
json: {
json: expect.objectContaining({
srcFs: '/source/path',
dstFs: 'remote:backup/path',
delete_on: 'dst',
},
}),
})
);
});
@@ -269,8 +324,22 @@ describe('RCloneApiService', () => {
describe('getJobStatus', () => {
it('should return job status', async () => {
const input: GetRCloneJobStatusDto = { jobId: 'job-123' };
const mockStatus = { status: 'running', progress: 0.5 };
mockGot.post.mockResolvedValue({ body: mockStatus });
const mockStatus = { id: 'job-123', status: 'running', progress: 0.5 };
mockGot.post.mockImplementation((url: string) => {
if (url.includes('core/stats')) {
return Promise.resolve({ body: {} });
}
if (url.includes('job/status')) {
return Promise.resolve({ body: mockStatus });
}
return Promise.resolve({ body: {} });
});
// Mock the status service methods
const mockStatusService = (service as any).statusService;
mockStatusService.enhanceStatsWithFormattedFields = vi.fn().mockReturnValue({});
mockStatusService.transformStatsToJob = vi.fn().mockReturnValue(null);
mockStatusService.parseJobWithStats = vi.fn().mockReturnValue(mockStatus);
const result = await service.getJobStatus(input);
@@ -335,7 +404,7 @@ describe('RCloneApiService', () => {
mockGot.post.mockRejectedValue(httpError);
await expect(service.getProviders()).rejects.toThrow(
'Rclone API Error (config/providers, HTTP 404): Failed to process error response body. Raw body:'
'Rclone API Error (config/providers, HTTP 404): Failed to process error response: '
);
});
@@ -352,7 +421,7 @@ describe('RCloneApiService', () => {
mockGot.post.mockRejectedValue(httpError);
await expect(service.getProviders()).rejects.toThrow(
'Rclone API Error (config/providers, HTTP 400): Failed to process error response body. Raw body: invalid json'
'Rclone API Error (config/providers, HTTP 400): Failed to process error response: invalid json'
);
});
@@ -367,7 +436,7 @@ describe('RCloneApiService', () => {
mockGot.post.mockRejectedValue('unknown error');
await expect(service.getProviders()).rejects.toThrow(
'Unknown error calling RClone API (config/providers) with params {}: unknown error'
'Unknown error calling RClone API (config/providers): unknown error'
);
});
});

View File

@@ -31,6 +31,7 @@ exports[`Returns paths 1`] = `
"activationBase",
"webGuiBase",
"identConfig",
"backupBase",
"activation",
"boot",
"webgui",

View File

@@ -124,7 +124,15 @@ export const parseConfig = <T extends Record<string, any>>(
throw new AppError('Invalid Parameters Passed to ParseConfig');
}
const data: Record<string, any> = parseIni(fileContents);
let data: Record<string, any>;
try {
data = parseIni(fileContents);
} catch (error) {
throw new AppError(
`Failed to parse config file: ${error instanceof Error ? error.message : String(error)}`
);
}
// Remove quotes around keys
const dataWithoutQuoteKeys = Object.fromEntries(
Object.entries(data).map(([key, value]) => [key.replace(/^"(.+(?="$))"$/, '$1'), value])

View File

@@ -71,6 +71,7 @@ const initialState = {
),
webGuiBase: '/usr/local/emhttp/webGui' as const,
identConfig: resolvePath(process.env.PATHS_IDENT_CONFIG ?? ('/boot/config/ident.cfg' as const)),
backupBase: resolvePath(process.env.PATHS_BACKUP_JOBS ?? ('/boot/config/api/backup/' as const)),
};
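
backupBase follows the same pattern as the other store paths, with an environment override. A sketch of redirecting it for development (the target path is illustrative):

// Illustrative: point backup job storage at a scratch directory.
// Must be set before the store's initial state is constructed.
process.env.PATHS_BACKUP_JOBS = '/tmp/unraid-backup-jobs/';
// BackupConfigService will then resolve its file as
// join(paths.backupBase, 'backup-jobs.json').
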
// Derive asset paths from base paths

View File

@@ -75,7 +75,7 @@ export class AuthService {
// Now get the updated roles
const existingRoles = await this.authzService.getRolesForUser(user.id);
this.logger.debug(`User ${user.id} has roles: ${existingRoles}`);
this.logger.verbose(`User ${user.id} has roles: ${existingRoles}`);
return user;
} catch (error: unknown) {
@@ -213,7 +213,7 @@ export class AuthService {
...rolesToRemove.map((role) => this.authzService.deleteRoleForUser(userId, role)),
]);
this.logger.debug(
this.logger.verbose(
`Synced roles for user ${userId}. Added: ${rolesToAdd.join(
','
)}, Removed: ${rolesToRemove.join(',')}`
@@ -234,7 +234,6 @@ export class AuthService {
* @returns a service account that represents the user session (i.e. a webgui user).
*/
async getSessionUser(): Promise<UserAccount> {
this.logger.debug('getSessionUser called!');
return {
id: '-1',
description: 'Session receives administrator permissions',

View File

@@ -12,6 +12,8 @@ import { ConfigPersistenceHelper } from '@app/unraid-api/config/persistence.help
export { type ApiConfig };
const logger = new Logger('ApiConfig');
const createDefaultConfig = (): ApiConfig => ({
version: API_VERSION,
extraOrigins: [],
@@ -33,21 +35,54 @@ export const persistApiConfig = async (config: ApiConfig) => {
};
export const loadApiConfig = async () => {
const defaultConfig = createDefaultConfig();
const apiConfig = new ApiStateConfig<ApiConfig>(
{
name: 'api',
defaultConfig,
parse: (data) => data as ApiConfig,
},
new ConfigPersistenceHelper()
);
const diskConfig = await apiConfig.parseConfig();
return {
...defaultConfig,
...diskConfig,
version: API_VERSION,
};
try {
const defaultConfig = createDefaultConfig();
const apiConfig = new ApiStateConfig<ApiConfig>(
{
name: 'api',
defaultConfig,
parse: (data) => data as ApiConfig,
},
new ConfigPersistenceHelper()
);
let diskConfig: ApiConfig | undefined;
try {
diskConfig = await apiConfig.parseConfig();
} catch (error) {
logger.error('Failed to load API config from disk, using defaults:', error);
diskConfig = undefined;
// Try to overwrite the invalid config with defaults to fix the issue
try {
const configToWrite = {
...defaultConfig,
version: API_VERSION,
};
const writeSuccess = await apiConfig.persist(configToWrite);
if (writeSuccess) {
logger.log('Successfully overwrote invalid config file with defaults.');
} else {
logger.error(
'Failed to overwrite invalid config file. Continuing with defaults in memory only.'
);
}
} catch (persistError) {
logger.error('Error during config file repair:', persistError);
}
}
return {
...defaultConfig,
...diskConfig,
version: API_VERSION,
};
} catch (outerError) {
// This should never happen, but ensures the config factory never throws
logger.error('Critical error in loadApiConfig, using minimal defaults:', outerError);
return createDefaultConfig();
}
};
/**
@@ -81,21 +116,29 @@ export class ApiConfigPersistence {
}
async onModuleInit() {
if (!(await fileExists(this.filePath))) {
this.migrateFromMyServersConfig();
try {
if (!(await fileExists(this.filePath))) {
this.migrateFromMyServersConfig();
}
await this.persistenceHelper.persistIfChanged(this.filePath, this.config);
this.configService.changes$.pipe(bufferTime(25)).subscribe({
next: async (changes) => {
if (changes.some((change) => change.path.startsWith('api'))) {
this.logger.verbose(`API Config changed ${JSON.stringify(changes)}`);
try {
await this.persistenceHelper.persistIfChanged(this.filePath, this.config);
} catch (persistError) {
this.logger.error('Error persisting config changes:', persistError);
}
}
},
error: (err) => {
this.logger.error('Error receiving config changes:', err);
},
});
} catch (error) {
this.logger.error('Error during API config module initialization:', error);
}
await this.persistenceHelper.persistIfChanged(this.filePath, this.config);
this.configService.changes$.pipe(bufferTime(25)).subscribe({
next: async (changes) => {
if (changes.some((change) => change.path.startsWith('api'))) {
this.logger.verbose(`API Config changed ${JSON.stringify(changes)}`);
await this.persistenceHelper.persistIfChanged(this.filePath, this.config);
}
},
error: (err) => {
this.logger.error('Error receiving config changes:', err);
},
});
}
convertLegacyConfig(

View File

@@ -2,9 +2,26 @@ import { ConfigService } from '@nestjs/config';
import { beforeEach, describe, expect, it, vi } from 'vitest';
import { ApiConfigPersistence } from '@app/unraid-api/config/api-config.module.js';
import { fileExists } from '@app/core/utils/files/file-exists.js';
import { ApiConfigPersistence, loadApiConfig } from '@app/unraid-api/config/api-config.module.js';
import { ConfigPersistenceHelper } from '@app/unraid-api/config/persistence.helper.js';
// Mock the core file-exists utility used by ApiStateConfig
vi.mock('@app/core/utils/files/file-exists.js', () => ({
fileExists: vi.fn(),
}));
// Mock the shared file-exists utility used by ConfigPersistenceHelper
vi.mock('@unraid/shared/util/file.js', () => ({
fileExists: vi.fn(),
}));
// Mock fs/promises for file I/O operations
vi.mock('fs/promises', () => ({
readFile: vi.fn(),
writeFile: vi.fn(),
}));
describe('ApiConfigPersistence', () => {
let service: ApiConfigPersistence;
let configService: ConfigService;
@@ -135,3 +152,127 @@ describe('ApiConfigPersistence', () => {
});
});
});
describe('loadApiConfig', () => {
let readFile: any;
let writeFile: any;
beforeEach(async () => {
vi.clearAllMocks();
// Reset modules to ensure fresh imports
vi.resetModules();
// Get mocked functions
const fsMocks = await import('fs/promises');
readFile = fsMocks.readFile;
writeFile = fsMocks.writeFile;
});
it('should return default config when file does not exist', async () => {
vi.mocked(fileExists).mockResolvedValue(false);
const result = await loadApiConfig();
expect(result).toEqual({
version: expect.any(String),
extraOrigins: [],
sandbox: false,
ssoSubIds: [],
plugins: [],
});
});
it('should merge disk config with defaults when file exists', async () => {
const diskConfig = {
extraOrigins: ['https://example.com'],
sandbox: true,
ssoSubIds: ['sub1', 'sub2'],
};
vi.mocked(fileExists).mockResolvedValue(true);
vi.mocked(readFile).mockResolvedValue(JSON.stringify(diskConfig));
const result = await loadApiConfig();
expect(result).toEqual({
version: expect.any(String),
extraOrigins: ['https://example.com'],
sandbox: true,
ssoSubIds: ['sub1', 'sub2'],
plugins: [],
});
});
it('should use default config and overwrite file when JSON parsing fails', async () => {
const { fileExists: sharedFileExists } = await import('@unraid/shared/util/file.js');
vi.mocked(fileExists).mockResolvedValue(true);
vi.mocked(readFile).mockResolvedValue('{ invalid json }');
vi.mocked(sharedFileExists).mockResolvedValue(false); // For persist operation
vi.mocked(writeFile).mockResolvedValue(undefined);
const result = await loadApiConfig();
// Error logging is handled by NestJS Logger, just verify the config is returned
expect(writeFile).toHaveBeenCalled();
expect(result).toEqual({
version: expect.any(String),
extraOrigins: [],
sandbox: false,
ssoSubIds: [],
plugins: [],
});
});
it('should handle write failure gracefully when JSON parsing fails', async () => {
const { fileExists: sharedFileExists } = await import('@unraid/shared/util/file.js');
vi.mocked(fileExists).mockResolvedValue(true);
vi.mocked(readFile).mockResolvedValue('{ invalid json }');
vi.mocked(sharedFileExists).mockResolvedValue(false); // For persist operation
vi.mocked(writeFile).mockRejectedValue(new Error('Permission denied'));
const result = await loadApiConfig();
// Error logging is handled by NestJS Logger, just verify the config is returned
expect(writeFile).toHaveBeenCalled();
expect(result).toEqual({
version: expect.any(String),
extraOrigins: [],
sandbox: false,
ssoSubIds: [],
plugins: [],
});
});
it('should use default config when file is empty', async () => {
vi.mocked(fileExists).mockResolvedValue(true);
vi.mocked(readFile).mockResolvedValue('');
const result = await loadApiConfig();
// No error logging expected for empty files
expect(result).toEqual({
version: expect.any(String),
extraOrigins: [],
sandbox: false,
ssoSubIds: [],
plugins: [],
});
});
it('should always override version with current API_VERSION', async () => {
const diskConfig = {
version: 'old-version',
extraOrigins: ['https://example.com'],
};
vi.mocked(fileExists).mockResolvedValue(true);
vi.mocked(readFile).mockResolvedValue(JSON.stringify(diskConfig));
const result = await loadApiConfig();
expect(result.version).not.toBe('old-version');
expect(result.version).toBeTruthy();
});
});

View File

@@ -0,0 +1,364 @@
import { Logger } from '@nestjs/common';
import { readFile } from 'node:fs/promises';
import { join } from 'path';
import type { Mock } from 'vitest';
import { beforeEach, describe, expect, it, vi } from 'vitest';
import { fileExists } from '@app/core/utils/files/file-exists.js';
import { ApiStateConfig } from '@app/unraid-api/config/factory/api-state.model.js';
import { ConfigPersistenceHelper } from '@app/unraid-api/config/persistence.helper.js';
vi.mock('node:fs/promises');
vi.mock('@app/core/utils/files/file-exists.js');
vi.mock('@app/environment.js', () => ({
PATHS_CONFIG_MODULES: '/test/config/path',
}));
describe('ApiStateConfig', () => {
let mockPersistenceHelper: ConfigPersistenceHelper;
let mockLogger: Logger;
interface TestConfig {
name: string;
value: number;
enabled: boolean;
}
const defaultConfig: TestConfig = {
name: 'test',
value: 42,
enabled: true,
};
const parseFunction = (data: unknown): TestConfig => {
if (!data || typeof data !== 'object') {
throw new Error('Invalid config format');
}
return data as TestConfig;
};
beforeEach(() => {
vi.clearAllMocks();
mockPersistenceHelper = {
persistIfChanged: vi.fn().mockResolvedValue(true),
} as any;
mockLogger = {
log: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
} as any;
vi.spyOn(Logger.prototype, 'log').mockImplementation(mockLogger.log);
vi.spyOn(Logger.prototype, 'warn').mockImplementation(mockLogger.warn);
vi.spyOn(Logger.prototype, 'error').mockImplementation(mockLogger.error);
vi.spyOn(Logger.prototype, 'debug').mockImplementation(mockLogger.debug);
});
describe('constructor', () => {
it('should initialize with cloned default config', () => {
const config = new ApiStateConfig(
{
name: 'test-config',
defaultConfig,
parse: parseFunction,
},
mockPersistenceHelper
);
expect(config.config).toEqual(defaultConfig);
expect(config.config).not.toBe(defaultConfig);
});
});
describe('token', () => {
it('should generate correct token', () => {
const config = new ApiStateConfig(
{
name: 'my-config',
defaultConfig,
parse: parseFunction,
},
mockPersistenceHelper
);
expect(config.token).toBe('ApiConfig.my-config');
});
});
describe('file paths', () => {
it('should generate correct file name', () => {
const config = new ApiStateConfig(
{
name: 'test-config',
defaultConfig,
parse: parseFunction,
},
mockPersistenceHelper
);
expect(config.fileName).toBe('test-config.json');
});
it('should generate correct file path', () => {
const config = new ApiStateConfig(
{
name: 'test-config',
defaultConfig,
parse: parseFunction,
},
mockPersistenceHelper
);
expect(config.filePath).toBe(join('/test/config/path', 'test-config.json'));
});
});
describe('parseConfig', () => {
let config: ApiStateConfig<TestConfig>;
beforeEach(() => {
config = new ApiStateConfig(
{
name: 'test-config',
defaultConfig,
parse: parseFunction,
},
mockPersistenceHelper
);
});
it('should return undefined when file does not exist', async () => {
(fileExists as Mock).mockResolvedValue(false);
const result = await config.parseConfig();
expect(result).toBeUndefined();
expect(readFile).not.toHaveBeenCalled();
});
it('should parse valid JSON config', async () => {
const validConfig = { name: 'custom', value: 100, enabled: false };
(fileExists as Mock).mockResolvedValue(true);
(readFile as Mock).mockResolvedValue(JSON.stringify(validConfig));
const result = await config.parseConfig();
expect(result).toEqual(validConfig);
expect(readFile).toHaveBeenCalledWith(config.filePath, 'utf8');
});
it('should return undefined for empty file', async () => {
(fileExists as Mock).mockResolvedValue(true);
(readFile as Mock).mockResolvedValue('');
const result = await config.parseConfig();
expect(result).toBeUndefined();
expect(mockLogger.warn).toHaveBeenCalledWith(expect.stringContaining('is empty'));
});
it('should return undefined for whitespace-only file', async () => {
(fileExists as Mock).mockResolvedValue(true);
(readFile as Mock).mockResolvedValue(' \n\t ');
const result = await config.parseConfig();
expect(result).toBeUndefined();
expect(mockLogger.warn).toHaveBeenCalledWith(expect.stringContaining('is empty'));
});
it('should throw error for invalid JSON', async () => {
(fileExists as Mock).mockResolvedValue(true);
(readFile as Mock).mockResolvedValue('{ invalid json }');
await expect(config.parseConfig()).rejects.toThrow();
expect(mockLogger.error).toHaveBeenCalledWith(
expect.stringContaining('Failed to parse JSON')
);
expect(mockLogger.debug).toHaveBeenCalledWith(expect.stringContaining('{ invalid json }'));
});
it('should throw error for incomplete JSON', async () => {
(fileExists as Mock).mockResolvedValue(true);
(readFile as Mock).mockResolvedValue('{ "name": "test"');
await expect(config.parseConfig()).rejects.toThrow();
expect(mockLogger.error).toHaveBeenCalledWith(
expect.stringContaining('Failed to parse JSON')
);
});
it('should use custom file path when provided', async () => {
const customPath = '/custom/path/config.json';
(fileExists as Mock).mockResolvedValue(true);
(readFile as Mock).mockResolvedValue(JSON.stringify(defaultConfig));
await config.parseConfig({ filePath: customPath });
expect(fileExists).toHaveBeenCalledWith(customPath);
expect(readFile).toHaveBeenCalledWith(customPath, 'utf8');
});
});
describe('persist', () => {
let config: ApiStateConfig<TestConfig>;
beforeEach(() => {
config = new ApiStateConfig(
{
name: 'test-config',
defaultConfig,
parse: parseFunction,
},
mockPersistenceHelper
);
});
it('should persist current config when no argument provided', async () => {
const result = await config.persist();
expect(result).toBe(true);
expect(mockPersistenceHelper.persistIfChanged).toHaveBeenCalledWith(
config.filePath,
defaultConfig
);
});
it('should persist provided config', async () => {
const customConfig = { name: 'custom', value: 999, enabled: false };
const result = await config.persist(customConfig);
expect(result).toBe(true);
expect(mockPersistenceHelper.persistIfChanged).toHaveBeenCalledWith(
config.filePath,
customConfig
);
});
it('should return false and log error on persistence failure', async () => {
(mockPersistenceHelper.persistIfChanged as Mock).mockResolvedValue(false);
const result = await config.persist();
expect(result).toBe(false);
expect(mockLogger.error).toHaveBeenCalledWith(
expect.stringContaining('Could not write config')
);
});
});
describe('load', () => {
let config: ApiStateConfig<TestConfig>;
beforeEach(() => {
config = new ApiStateConfig(
{
name: 'test-config',
defaultConfig,
parse: parseFunction,
},
mockPersistenceHelper
);
});
it('should load config from file when it exists', async () => {
const savedConfig = { name: 'saved', value: 200, enabled: true };
(fileExists as Mock).mockResolvedValue(true);
(readFile as Mock).mockResolvedValue(JSON.stringify(savedConfig));
await config.load();
expect(config.config).toEqual(savedConfig);
});
it('should create default config when file does not exist', async () => {
(fileExists as Mock).mockResolvedValue(false);
await config.load();
expect(config.config).toEqual(defaultConfig);
expect(mockLogger.log).toHaveBeenCalledWith(
expect.stringContaining('Config file does not exist')
);
expect(mockPersistenceHelper.persistIfChanged).toHaveBeenCalledWith(
config.filePath,
defaultConfig
);
});
it('should not modify config when file is invalid', async () => {
(fileExists as Mock).mockResolvedValue(true);
(readFile as Mock).mockResolvedValue('invalid json');
await config.load();
expect(config.config).toEqual(defaultConfig);
expect(mockLogger.warn).toHaveBeenCalledWith(
expect.any(Error),
expect.stringContaining('is invalid')
);
});
it('should not throw even when persist fails', async () => {
(fileExists as Mock).mockResolvedValue(false);
(mockPersistenceHelper.persistIfChanged as Mock).mockResolvedValue(false);
await expect(config.load()).resolves.not.toThrow();
expect(config.config).toEqual(defaultConfig);
});
});
describe('update', () => {
let config: ApiStateConfig<TestConfig>;
beforeEach(() => {
config = new ApiStateConfig(
{
name: 'test-config',
defaultConfig,
parse: parseFunction,
},
mockPersistenceHelper
);
});
it('should update config with partial values', () => {
config.update({ value: 123 });
expect(config.config).toEqual({
name: 'test',
value: 123,
enabled: true,
});
});
it('should return self for chaining', () => {
const result = config.update({ enabled: false });
expect(result).toBe(config);
});
it('should validate updated config through parse function', () => {
const badParseFunction = vi.fn().mockImplementation(() => {
throw new Error('Validation failed');
});
const strictConfig = new ApiStateConfig(
{
name: 'strict-config',
defaultConfig,
parse: badParseFunction,
},
mockPersistenceHelper
);
expect(() => strictConfig.update({ value: -1 })).toThrow('Validation failed');
});
});
});

View File

@@ -56,13 +56,11 @@ export class ApiStateConfig<T> {
* @returns True if the config was written successfully, false otherwise.
*/
async persist(config = this.#config) {
try {
await this.persistenceHelper.persistIfChanged(this.filePath, config);
return true;
} catch (error) {
this.logger.error(error, `Could not write config to ${this.filePath}.`);
return false;
const success = await this.persistenceHelper.persistIfChanged(this.filePath, config);
if (!success) {
this.logger.error(`Could not write config to ${this.filePath}.`);
}
return success;
}
/**
@@ -76,8 +74,23 @@ export class ApiStateConfig<T> {
const { filePath = this.filePath } = opts;
if (!(await fileExists(filePath))) return undefined;
const rawConfig = JSON.parse(await readFile(filePath, 'utf8'));
return this.options.parse(rawConfig);
const fileContent = await readFile(filePath, 'utf8');
if (!fileContent || fileContent.trim() === '') {
this.logger.warn(`Config file '${filePath}' is empty.`);
return undefined;
}
try {
const rawConfig = JSON.parse(fileContent);
return this.options.parse(rawConfig);
} catch (error) {
this.logger.error(
`Failed to parse JSON from '${filePath}': ${error instanceof Error ? error.message : String(error)}`
);
this.logger.debug(`File content: ${fileContent.substring(0, 100)}...`);
throw error;
}
}
/**

View File

@@ -12,24 +12,59 @@ export class ConfigPersistenceHelper {
*
* @param filePath - The path to the config file.
* @param data - The data to persist.
* @returns `true` if the config was persisted, `false` otherwise.
* @returns `true` if the config was persisted, `false` if no changes were needed or if persistence failed.
*
* @throws {Error} if the config file does not exist or is unreadable.
* @throws {Error} if the config file is not valid JSON.
* @throws {Error} if given data is not JSON (de)serializable.
* @throws {Error} if the config file is not writable.
* This method is designed to never throw errors. If the existing file is corrupted or unreadable,
* it will attempt to overwrite it with the new data. If write operations fail, it returns false
* but does not crash the application.
*/
async persistIfChanged(filePath: string, data: unknown): Promise<boolean> {
if (!(await fileExists(filePath))) {
await writeFile(filePath, JSON.stringify(data ?? {}, null, 2));
return true;
try {
const jsonString = JSON.stringify(data ?? {}, null, 2);
await writeFile(filePath, jsonString);
return true;
} catch (error) {
// JSON serialization or write failed, but don't crash - just return false
return false;
}
}
const currentData = JSON.parse(await readFile(filePath, 'utf8'));
const stagedData = JSON.parse(JSON.stringify(data));
let currentData: unknown;
try {
const fileContent = await readFile(filePath, 'utf8');
currentData = JSON.parse(fileContent);
} catch (error) {
// If existing file is corrupted, treat it as if it doesn't exist
// and write the new data
try {
const jsonString = JSON.stringify(data ?? {}, null, 2);
await writeFile(filePath, jsonString);
return true;
} catch (writeError) {
// JSON serialization or write failed, but don't crash - just return false
return false;
}
}
let stagedData: unknown;
try {
stagedData = JSON.parse(JSON.stringify(data));
} catch (error) {
// If data can't be serialized to JSON, we can't persist it
return false;
}
if (isEqual(currentData, stagedData)) {
return false;
}
await writeFile(filePath, JSON.stringify(stagedData, null, 2));
return true;
try {
await writeFile(filePath, JSON.stringify(stagedData, null, 2));
return true;
} catch (error) {
// Write failed, but don't crash - just return false
return false;
}
}
}
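
The resulting contract: unchanged data is a no-op, a corrupt existing file is overwritten with the new data, and any failure comes back as false rather than a throw. A usage sketch (the file path is illustrative):

// Illustrative usage of the non-throwing persistence contract.
async function saveApiConfig(): Promise<void> {
    const helper = new ConfigPersistenceHelper();
    const wrote = await helper.persistIfChanged('/tmp/api.json', { sandbox: false });
    if (!wrote) {
        // Either the data was unchanged or the write failed; callers log and continue.
    }
}
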

View File

@@ -0,0 +1,600 @@
import { forwardRef, Inject, Injectable, Logger, OnModuleInit } from '@nestjs/common';
import { SchedulerRegistry } from '@nestjs/schedule';
import { existsSync } from 'fs';
import { readFile, writeFile } from 'fs/promises';
import { join } from 'path';
import { CronJob } from 'cron';
import { v4 as uuidv4 } from 'uuid';
import { getters } from '@app/store/index.js';
import {
BackupJobConfig,
CreateBackupJobConfigInput,
UpdateBackupJobConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/backup.model.js';
import { getBackupJobGroupId } from '@app/unraid-api/graph/resolvers/backup/backup.utils.js';
import {
DestinationConfigInput,
DestinationType,
RcloneDestinationConfig,
} from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.types.js';
import { BackupOrchestrationService } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-orchestration.service.js';
import {
FlashPreprocessConfig,
RawBackupConfig,
ScriptPreprocessConfig,
SourceConfigInput,
SourceType,
ZfsPreprocessConfig,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
import { RCloneService } from '@app/unraid-api/graph/resolvers/rclone/rclone.service.js';
const JOB_GROUP_PREFIX = 'backup-';
@Injectable()
export class BackupConfigService implements OnModuleInit {
private readonly logger = new Logger(BackupConfigService.name);
private readonly configPath: string;
private configs: Map<string, BackupJobConfig> = new Map();
constructor(
private readonly rcloneService: RCloneService,
private readonly schedulerRegistry: SchedulerRegistry,
@Inject(forwardRef(() => BackupOrchestrationService))
private readonly backupOrchestrationService: BackupOrchestrationService
) {
const paths = getters.paths();
this.configPath = join(paths.backupBase, 'backup-jobs.json');
}
async onModuleInit(): Promise<void> {
await this.loadConfigs();
}
private transformSourceConfigInput(
input: SourceConfigInput
): ZfsPreprocessConfig | FlashPreprocessConfig | ScriptPreprocessConfig | RawBackupConfig {
switch (input.type) {
case SourceType.ZFS: {
if (!input.zfsConfig) {
throw new Error('ZFS configuration is required when type is ZFS');
}
const zfsConfig = new ZfsPreprocessConfig();
zfsConfig.label = input.zfsConfig.label || 'ZFS backup';
zfsConfig.poolName = input.zfsConfig.poolName;
zfsConfig.datasetName = input.zfsConfig.datasetName;
zfsConfig.snapshotPrefix = input.zfsConfig.snapshotPrefix;
zfsConfig.cleanupSnapshots = input.zfsConfig.cleanupSnapshots ?? true;
zfsConfig.retainSnapshots = input.zfsConfig.retainSnapshots;
return zfsConfig;
}
case SourceType.FLASH: {
if (!input.flashConfig) {
throw new Error('Flash configuration is required when type is FLASH');
}
const flashConfig = new FlashPreprocessConfig();
flashConfig.label = input.flashConfig.label || 'Flash drive backup';
flashConfig.flashPath = input.flashConfig.flashPath || '/boot';
flashConfig.includeGitHistory = input.flashConfig.includeGitHistory ?? true;
flashConfig.additionalPaths = input.flashConfig.additionalPaths || [];
return flashConfig;
}
case SourceType.SCRIPT: {
if (!input.scriptConfig) {
throw new Error('Script configuration is required when type is SCRIPT');
}
const scriptConfig = new ScriptPreprocessConfig();
scriptConfig.label = input.scriptConfig.label || 'Script backup';
scriptConfig.scriptPath = input.scriptConfig.scriptPath;
scriptConfig.scriptArgs = input.scriptConfig.scriptArgs || [];
scriptConfig.workingDirectory = input.scriptConfig.workingDirectory;
scriptConfig.environment = input.scriptConfig.environment;
scriptConfig.outputPath = input.scriptConfig.outputPath;
return scriptConfig;
}
case SourceType.RAW: {
if (!input.rawConfig) {
throw new Error('Raw configuration is required when type is RAW');
}
const rawConfig = new RawBackupConfig();
rawConfig.label = input.rawConfig.label || 'Raw file backup';
rawConfig.sourcePath = input.rawConfig.sourcePath;
rawConfig.excludePatterns = input.rawConfig.excludePatterns || [];
rawConfig.includePatterns = input.rawConfig.includePatterns || [];
return rawConfig;
}
default:
throw new Error(`Unsupported source type: ${input.type}`);
}
}
private transformDestinationConfigInput(input: DestinationConfigInput): RcloneDestinationConfig {
switch (input.type) {
case DestinationType.RCLONE: {
if (!input.rcloneConfig) {
throw new Error('RClone configuration is required when type is RCLONE');
}
const rcloneConfig = new RcloneDestinationConfig();
rcloneConfig.type = 'RCLONE';
rcloneConfig.remoteName = input.rcloneConfig.remoteName;
rcloneConfig.destinationPath = input.rcloneConfig.destinationPath;
rcloneConfig.rcloneOptions = input.rcloneConfig.rcloneOptions;
return rcloneConfig;
}
default:
throw new Error(`Unsupported destination type: ${input.type}`);
}
}
async createBackupJobConfig(input: CreateBackupJobConfigInput): Promise<BackupJobConfig> {
const id = uuidv4();
const now = new Date().toISOString();
// Validate input sourceConfig and destinationConfig presence
if (!input.sourceConfig) {
this.logger.error('Source configuration (sourceConfig) is required.');
throw new Error('Source configuration (sourceConfig) is required.');
}
if (!input.destinationConfig) {
this.logger.error('Destination configuration (destinationConfig) is required.');
throw new Error('Destination configuration (destinationConfig) is required.');
}
// Extract sourceType and destinationType from the respective config objects
const sourceType = input.sourceConfig.type;
const destinationType = input.destinationConfig.type;
if (!sourceType) {
this.logger.error("Source configuration must include a valid 'type' property.");
throw new Error("Source configuration must include a valid 'type' property.");
}
if (!destinationType) {
this.logger.error("Destination configuration must include a valid 'type' property.");
throw new Error("Destination configuration must include a valid 'type' property.");
}
// Transform the source config input into the appropriate union member
const transformedSourceConfig = this.transformSourceConfigInput(input.sourceConfig);
// Transform the destination config input into the appropriate union member
const transformedDestinationConfig = this.transformDestinationConfigInput(
input.destinationConfig
);
const config: BackupJobConfig = {
id,
name: input.name,
sourceType,
destinationType,
schedule: input.schedule || '0 2 * * *',
enabled: input.enabled,
sourceConfig: transformedSourceConfig,
destinationConfig: transformedDestinationConfig,
createdAt: now,
updatedAt: now,
};
this.configs.set(id, config);
await this.saveConfigs();
if (config.enabled) {
this.scheduleJob(config);
}
return config;
}
async updateBackupJobConfig(
id: string,
input: UpdateBackupJobConfigInput
): Promise<BackupJobConfig | null> {
this.logger.debug(
`[updateBackupJobConfig] Called with ID: ${id}, Input: ${JSON.stringify(input)}`
);
const existing = this.configs.get(id);
if (!existing) {
this.logger.warn(`[updateBackupJobConfig] No existing config found for ID: ${id}`);
return null;
}
this.logger.debug(
`[updateBackupJobConfig] Existing config for ID ${id}: ${JSON.stringify(existing)}`
);
// Handle sourceConfig update
let updatedSourceConfig = existing.sourceConfig;
let updatedSourceType = existing.sourceType;
if (input.sourceConfig) {
const inputSourceType = input.sourceConfig.type;
if (!inputSourceType) {
this.logger.warn(
`[updateBackupJobConfig] Source config update for ID ${id} is missing 'type'. Update skipped for sourceConfig.`
);
} else {
// Transform the input into the appropriate union member
updatedSourceConfig = this.transformSourceConfigInput(input.sourceConfig);
updatedSourceType = inputSourceType;
this.logger.debug(`[updateBackupJobConfig] Transformed sourceConfig for ${id}.`);
}
}
// Handle destinationConfig update
let updatedDestinationConfig = existing.destinationConfig;
let updatedDestinationType = existing.destinationType;
if (input.destinationConfig) {
const inputDestinationType = input.destinationConfig.type;
if (!inputDestinationType) {
this.logger.warn(
`[updateBackupJobConfig] Destination config update for ID ${id} is missing 'type'. Update skipped for destinationConfig.`
);
} else {
// Transform the input into the appropriate union member
updatedDestinationConfig = this.transformDestinationConfigInput(input.destinationConfig);
updatedDestinationType = inputDestinationType;
this.logger.debug(`[updateBackupJobConfig] Updated destinationConfig for ${id}.`);
}
}
const updated: BackupJobConfig = {
...existing,
name: input.name ?? existing.name,
schedule: input.schedule ?? existing.schedule,
enabled: input.enabled ?? existing.enabled,
sourceType: updatedSourceType,
destinationType: updatedDestinationType,
sourceConfig: updatedSourceConfig,
destinationConfig: updatedDestinationConfig,
updatedAt: new Date().toISOString(),
lastRunAt: input.lastRunAt !== undefined ? input.lastRunAt : existing.lastRunAt,
lastRunStatus:
input.lastRunStatus !== undefined ? input.lastRunStatus : existing.lastRunStatus,
};
this.logger.debug(
`[updateBackupJobConfig] Updated object for ID ${id} (before set): ${JSON.stringify(updated)}`
);
this.configs.set(id, updated);
await this.saveConfigs();
this.logger.debug(`[updateBackupJobConfig] Configs saved for ID: ${id}`);
this.unscheduleJob(id);
if (updated.enabled) {
this.scheduleJob(updated);
}
return updated;
}
async deleteBackupJobConfig(id: string): Promise<boolean> {
const config = this.configs.get(id);
if (!config) return false;
this.unscheduleJob(id);
this.configs.delete(id);
await this.saveConfigs();
return true;
}
async getBackupJobConfig(id: string): Promise<BackupJobConfig | null> {
this.logger.debug(`[getBackupJobConfig] Called for ID: ${id}`);
const config = this.configs.get(id);
if (config) {
this.logger.debug(
`[getBackupJobConfig] Found config for ID ${id}: ${JSON.stringify(config)}`
);
} else {
this.logger.warn(`[getBackupJobConfig] No config found for ID: ${id}`);
}
return config || null;
}
async getAllBackupJobConfigs(): Promise<BackupJobConfig[]> {
return Array.from(this.configs.values());
}
private transformPlainObjectToSourceConfig(
obj: any,
sourceType: SourceType
): ZfsPreprocessConfig | FlashPreprocessConfig | ScriptPreprocessConfig | RawBackupConfig {
switch (sourceType) {
case SourceType.ZFS: {
const zfsConfig = new ZfsPreprocessConfig();
Object.assign(zfsConfig, obj);
return zfsConfig;
}
case SourceType.FLASH: {
const flashConfig = new FlashPreprocessConfig();
Object.assign(flashConfig, obj);
return flashConfig;
}
case SourceType.SCRIPT: {
const scriptConfig = new ScriptPreprocessConfig();
Object.assign(scriptConfig, obj);
return scriptConfig;
}
case SourceType.RAW: {
const rawConfig = new RawBackupConfig();
Object.assign(rawConfig, obj);
return rawConfig;
}
default:
this.logger.error(
`Unsupported source type encountered during plain object transformation: ${sourceType as string}`
);
throw new Error(`Unsupported source type: ${sourceType as string}`);
}
}
private transformPlainObjectToDestinationConfig(
obj: any,
destinationType: DestinationType
): RcloneDestinationConfig {
switch (destinationType) {
case DestinationType.RCLONE: {
const rcloneConfig = new RcloneDestinationConfig();
Object.assign(rcloneConfig, obj);
return rcloneConfig;
}
default:
throw new Error(`Unsupported destination type: ${destinationType}`);
}
}
private async executeBackupJob(config: BackupJobConfig): Promise<void> {
this.logger.log(
`Executing backup job via BackupOrchestrationService: ${config.name} (ID: ${config.id})`
);
// Prepare updates, currentJobId will be set after job starts
const updatesForInMemoryConfig: Partial<BackupJobConfig> = {
lastRunAt: new Date().toISOString(),
lastRunStatus: 'Starting...',
currentJobId: undefined, // Initialize
};
try {
// Delegate to the BackupOrchestrationService and get the jobId
// IMPORTANT: This assumes backupOrchestrationService.executeBackupJob is modified to return the jobId string
const jobId = await this.backupOrchestrationService.executeBackupJob(config, config.id);
if (jobId) {
updatesForInMemoryConfig.currentJobId = jobId;
this.logger.log(
`Backup job ${config.name} (ID: ${config.id}) initiated by BackupOrchestrationService with Job ID: ${jobId}.`
);
} else {
this.logger.warn(
`BackupOrchestrationService.executeBackupJob did not return a jobId for config ${config.id}. currentJobId will not be set.`
);
}
// Update the in-memory config with all changes including currentJobId
const currentConfig = this.configs.get(config.id);
if (currentConfig) {
this.configs.set(config.id, {
...currentConfig,
...updatesForInMemoryConfig,
});
} else {
this.logger.warn(
`Config ${config.id} not found in memory map after starting job. State may be inconsistent.`
);
// Fallback: attempt to set it anyway, though this indicates a potential issue
this.configs.set(config.id, {
...config, // Use the passed config as a base
...updatesForInMemoryConfig,
});
}
// Persist the updated run metadata. The in-memory config keeps currentJobId so the
// GraphQL resolver can report the running job; saveConfigs() strips currentJobId
// before writing to disk, so only lastRunAt, lastRunStatus, and the rest of the
// config are persisted.
await this.saveConfigs();
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(
`Backup job ${config.name} (ID: ${config.id}) failed during orchestration: ${errorMessage}`,
(error as Error).stack
);
const currentConfig = this.configs.get(config.id);
const failedConfigUpdate = {
lastRunAt: new Date().toISOString(),
lastRunStatus: `Failed: ${errorMessage}`,
currentJobId: undefined, // Clear currentJobId on failure
};
if (currentConfig) {
this.configs.set(config.id, {
...currentConfig,
...failedConfigUpdate,
});
} else {
// If not in map, use passed config as base
this.configs.set(config.id, {
...config,
...failedConfigUpdate,
});
}
await this.saveConfigs(); // Save updated status, currentJobId will be cleared
throw error;
}
}
// Add a new method to be called when a job completes or is stopped
public async handleJobCompletion(
configId: string,
finalStatus: string,
jobId?: string
): Promise<void> {
const config = this.configs.get(configId);
if (config) {
this.logger.log(
`Handling job completion for config ${configId}, job ${jobId}. Final status: ${finalStatus}`
);
const updates: Partial<BackupJobConfig> = {
lastRunStatus: finalStatus,
lastRunAt: new Date().toISOString(), // Update lastRunAt to completion time
};
// Only clear currentJobId if it matches the completed/stopped job
if (config.currentJobId === jobId) {
updates.currentJobId = undefined;
} else if (jobId && config.currentJobId) {
this.logger.warn(
`Completed job ID ${jobId} does not match currentJobId ${config.currentJobId} for config ${configId}. currentJobId not cleared.`
);
}
this.configs.set(configId, {
...config,
...updates,
});
// currentJobId is cleared (or left as-is) in memory; saveConfigs() persists
// the remaining state and strips currentJobId before writing to disk.
await this.saveConfigs();
} else {
this.logger.warn(`Config ${configId} not found when trying to handle job completion.`);
}
}
private scheduleJob(config: BackupJobConfig): void {
try {
const job = new CronJob(
config.schedule,
() => this.executeBackupJob(config),
null,
false,
'UTC'
);
this.schedulerRegistry.addCronJob(getBackupJobGroupId(config.id), job);
job.start();
this.logger.log(`Scheduled backup job: ${config.name} with schedule: ${config.schedule}`);
} catch (error) {
this.logger.error(`Failed to schedule backup job ${config.name}:`, error);
}
}
private unscheduleJob(id: string): void {
try {
const jobName = getBackupJobGroupId(id);
if (this.schedulerRegistry.doesExist('cron', jobName)) {
this.schedulerRegistry.deleteCronJob(jobName);
this.logger.log(`Unscheduled backup job: ${id}`);
} else {
this.logger.debug(`No existing cron job found to unschedule for backup job: ${id}`);
}
} catch (error) {
this.logger.error(`Failed to unschedule backup job ${id}:`, error);
}
}
private async loadConfigs(): Promise<void> {
try {
if (existsSync(this.configPath)) {
const data = await readFile(this.configPath, 'utf-8');
const configs: BackupJobConfig[] = JSON.parse(data);
// First, unschedule any existing jobs before clearing the config map
this.configs.forEach((config) => {
if (config.enabled) {
this.unscheduleJob(config.id);
}
});
this.configs.clear();
configs.forEach((config) => {
// Transform plain objects back into class instances
const transformedConfig = {
...config,
sourceConfig: this.transformPlainObjectToSourceConfig(
config.sourceConfig,
config.sourceType
),
destinationConfig: this.transformPlainObjectToDestinationConfig(
config.destinationConfig,
config.destinationType
),
};
this.configs.set(config.id, transformedConfig);
if (transformedConfig.enabled) {
this.scheduleJob(transformedConfig);
}
});
this.logger.log(`Loaded ${configs.length} backup job configurations`);
}
} catch (error) {
this.logger.error('Failed to load backup configurations:', error);
}
}
private async saveConfigs(): Promise<void> {
try {
// Copy each config for saving, stripping the in-memory-only currentJobId
const configsToSave: BackupJobConfig[] = [];
for (const config of this.configs.values()) {
const { currentJobId, ...restOfConfig } = config;
configsToSave.push(restOfConfig as BackupJobConfig);
}
}
await writeFile(this.configPath, JSON.stringify(configsToSave, null, 2));
} catch (error) {
this.logger.error('Failed to save backup configurations:', error);
}
}
}

View File

@@ -0,0 +1,313 @@
import { Logger } from '@nestjs/common';
import { Args, ResolveField, Resolver } from '@nestjs/graphql';
import { Resource } from '@unraid/shared/graphql.model';
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar';
import {
AuthActionVerb,
AuthPossession,
UsePermissions,
} from '@unraid/shared/use-permissions.directive.js';
import { BackupConfigService } from '@app/unraid-api/graph/resolvers/backup/backup-config.service.js';
import {
BackupJobConfig,
BackupStatus,
CreateBackupJobConfigInput,
InitiateBackupInput,
UpdateBackupJobConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/backup.model.js';
import { BackupOrchestrationService } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-orchestration.service.js';
import { BackupMutations } from '@app/unraid-api/graph/resolvers/mutation/mutation.model.js';
import { RCloneService } from '@app/unraid-api/graph/resolvers/rclone/rclone.service.js';
@Resolver(() => BackupMutations)
export class BackupMutationsResolver {
private readonly logger = new Logger(BackupMutationsResolver.name);
constructor(
private readonly backupConfigService: BackupConfigService,
private readonly rcloneService: RCloneService,
private readonly backupOrchestrationService: BackupOrchestrationService
) {}
private async executeBackup(
sourcePath: string,
remoteName: string,
destinationPath: string,
options: Record<string, any> = {},
configId?: string
): Promise<BackupStatus> {
try {
this.logger.log(`Executing backup: ${sourcePath} -> ${remoteName}:${destinationPath}`);
// Create a temporary config for the orchestration service.
// NOTE: the lowercase 'raw'/'rclone' literals below are cast past the
// SourceType/DestinationType enums (whose members are uppercase); downstream
// handling is assumed to normalize these values.
const tempConfig: BackupJobConfig = {
id: configId || `temp-${Date.now()}`,
name: `Manual backup to ${remoteName}`,
sourceType: 'raw' as any,
destinationType: 'rclone' as any,
schedule: '',
enabled: true,
sourceConfig: {
type: 'raw',
sourcePath: sourcePath,
} as any,
destinationConfig: {
type: 'rclone',
remoteName: remoteName,
destinationPath: destinationPath,
options: options,
} as any,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
};
const jobId = tempConfig.id;
// Use the orchestration service for execution
await this.backupOrchestrationService.executeBackupJob(tempConfig, jobId);
this.logger.log(`Backup job initiated successfully with ID: ${jobId}`);
return {
status: 'Backup initiated successfully',
jobId: jobId,
};
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(
`Failed to execute backup: ${errorMessage}`,
error instanceof Error ? error.stack : undefined
);
return {
status: `Failed to initiate backup: ${errorMessage}`,
jobId: undefined,
};
}
}
@ResolveField(() => BackupJobConfig, {
description: 'Create a new backup job configuration',
})
@UsePermissions({
action: AuthActionVerb.CREATE,
resource: Resource.BACKUP,
possession: AuthPossession.ANY,
})
async createBackupJobConfig(
@Args('input') input: CreateBackupJobConfigInput
): Promise<BackupJobConfig> {
return this.backupConfigService.createBackupJobConfig(input);
}
@ResolveField(() => BackupJobConfig, {
description: 'Update a backup job configuration',
nullable: true,
})
@UsePermissions({
action: AuthActionVerb.UPDATE,
resource: Resource.BACKUP,
possession: AuthPossession.ANY,
})
async updateBackupJobConfig(
@Args('id', { type: () => PrefixedID }) id: string,
@Args('input') input: UpdateBackupJobConfigInput
): Promise<BackupJobConfig | null> {
return this.backupConfigService.updateBackupJobConfig(id, input);
}
@ResolveField(() => Boolean, {
description: 'Delete a backup job configuration',
})
@UsePermissions({
action: AuthActionVerb.DELETE,
resource: Resource.BACKUP,
possession: AuthPossession.ANY,
})
async deleteBackupJobConfig(@Args('id', { type: () => PrefixedID }) id: string): Promise<boolean> {
return this.backupConfigService.deleteBackupJobConfig(id);
}
@ResolveField(() => BackupStatus, {
description: 'Initiates a backup using a configured remote.',
})
@UsePermissions({
action: AuthActionVerb.CREATE,
resource: Resource.BACKUP,
possession: AuthPossession.ANY,
})
async initiateBackup(@Args('input') input: InitiateBackupInput): Promise<BackupStatus> {
return this.executeBackup(
input.sourcePath,
input.remoteName,
input.destinationPath,
input.options || {}
);
}
@ResolveField(() => BackupJobConfig, {
description: 'Toggle a backup job configuration enabled/disabled',
nullable: true,
})
@UsePermissions({
action: AuthActionVerb.UPDATE,
resource: Resource.BACKUP,
possession: AuthPossession.ANY,
})
async toggleJobConfig(
@Args('id', { type: () => PrefixedID }) id: string
): Promise<BackupJobConfig | null> {
const existing = await this.backupConfigService.getBackupJobConfig(id);
if (!existing) return null;
return this.backupConfigService.updateBackupJobConfig(id, {
enabled: !existing.enabled,
});
}
@ResolveField(() => BackupStatus, {
description: 'Manually trigger a backup job using existing configuration',
})
@UsePermissions({
action: AuthActionVerb.CREATE,
resource: Resource.BACKUP,
possession: AuthPossession.ANY,
})
async triggerJob(@Args('id', { type: () => PrefixedID }) id: string): Promise<BackupStatus> {
const config = await this.backupConfigService.getBackupJobConfig(id);
if (!config) {
return {
status: 'Failed to trigger backup: Configuration not found',
jobId: undefined,
};
}
try {
// Use the orchestration service to execute the backup job
await this.backupOrchestrationService.executeBackupJob(config, config.id);
// Update the config with job start information
await this.backupConfigService.updateBackupJobConfig(id, {
lastRunStatus: `Started with job ID: ${config.id}`,
lastRunAt: new Date().toISOString(),
});
return {
status: 'Backup job triggered successfully',
jobId: config.id,
};
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(`Failed to trigger backup job ${id}: ${errorMessage}`);
await this.backupConfigService.updateBackupJobConfig(id, {
lastRunStatus: `Failed: ${errorMessage}`,
lastRunAt: new Date().toISOString(),
});
return {
status: `Failed to trigger backup: ${errorMessage}`,
jobId: undefined,
};
}
}
@ResolveField(() => BackupStatus, {
description: 'Stop all running backup jobs',
})
@UsePermissions({
action: AuthActionVerb.DELETE,
resource: Resource.BACKUP,
possession: AuthPossession.ANY,
})
async stopAllBackupJobs(): Promise<BackupStatus> {
try {
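// NOTE: this (and stopBackupJob below) reaches into RCloneService's private
// rcloneApiService member via bracket access; injecting RCloneApiService
// directly would be cleaner.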
const result = await this.rcloneService['rcloneApiService'].stopAllJobs();
const stoppedCount = result.stopped.length;
const errorCount = result.errors.length;
if (stoppedCount > 0) {
this.logger.log(`Stopped ${stoppedCount} backup jobs`);
}
if (errorCount > 0) {
this.logger.warn(`Failed operations on ${errorCount} jobs: ${result.errors.join(', ')}`);
}
return {
status: `Stopped ${stoppedCount} jobs${errorCount > 0 ? `, ${errorCount} errors` : ''}`,
jobId: undefined,
};
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(`Failed to stop backup jobs: ${errorMessage}`);
return {
status: `Failed to stop backup jobs: ${errorMessage}`,
jobId: undefined,
};
}
}
@ResolveField(() => BackupStatus, {
description: 'Stop a specific backup job',
})
@UsePermissions({
action: AuthActionVerb.DELETE,
resource: Resource.BACKUP,
possession: AuthPossession.ANY,
})
async stopBackupJob(@Args('id', { type: () => PrefixedID }) id: string): Promise<BackupStatus> {
try {
const result = await this.rcloneService['rcloneApiService'].stopJob(id);
const stoppedCount = result.stopped.length;
const errorCount = result.errors.length;
if (stoppedCount > 0) {
this.logger.log(`Stopped backup job: ${id}`);
}
if (errorCount > 0) {
this.logger.warn(`Failed to stop job ${id}: ${result.errors.join(', ')}`);
}
return {
status: stoppedCount > 0 ? `Stopped job ${id}` : `Failed to stop job ${id}`,
jobId: stoppedCount > 0 ? id : undefined,
};
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(`Failed to stop backup job ${id}: ${errorMessage}`);
return {
status: `Failed to stop backup job: ${errorMessage}`,
jobId: undefined,
};
}
}
@ResolveField(() => BackupStatus, {
description: 'Forget all finished backup jobs to clean up the job list',
})
@UsePermissions({
action: AuthActionVerb.DELETE,
resource: Resource.BACKUP,
possession: AuthPossession.ANY,
})
async forgetFinishedBackupJobs(): Promise<BackupStatus> {
try {
this.logger.log('Forgetting finished backup jobs is handled automatically by RClone');
return {
status: 'Finished jobs are automatically cleaned up by RClone',
jobId: undefined,
};
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(`Failed to forget finished backup jobs: ${errorMessage}`);
return {
status: `Failed to forget finished backup jobs: ${errorMessage}`,
jobId: undefined,
};
}
}
}
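// Example mutation (illustrative; assumes the BackupMutations field is exposed
// as `backup` on the root Mutation type):
//
//   mutation {
//       backup {
//           initiateBackup(
//               input: {
//                   remoteName: "gdrive"
//                   sourcePath: "/mnt/user/documents"
//                   destinationPath: "backups/documents"
//               }
//           ) {
//               status
//               jobId
//           }
//       }
//   }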

View File

@@ -0,0 +1,226 @@
import { Field, InputType, ObjectType } from '@nestjs/graphql';
import { type Layout } from '@jsonforms/core';
import { Node } from '@unraid/shared/graphql.model.js';
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';
import {
IsBoolean,
IsNotEmpty,
IsObject,
IsOptional,
IsString,
Matches,
ValidateIf,
ValidateNested,
} from 'class-validator';
import { DateTimeISOResolver, GraphQLJSON } from 'graphql-scalars';
import {
DestinationConfigInput,
DestinationConfigInputUnion,
DestinationConfigUnion,
DestinationType,
} from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.types.js';
import { JobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import {
SourceConfigInput,
SourceConfigInputUnion,
SourceConfigUnion,
SourceType,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
import { DataSlice } from '@app/unraid-api/types/json-forms.js';
@ObjectType({
implements: () => Node,
})
export class Backup extends Node {
@Field(() => [JobStatus])
jobs!: JobStatus[];
@Field(() => [BackupJobConfig])
configs!: BackupJobConfig[];
}
@InputType()
export class InitiateBackupInput {
@Field(() => String, { description: 'The name of the remote configuration to use for the backup.' })
@IsString()
@IsNotEmpty()
remoteName!: string;
@Field(() => String, { description: 'Source path to backup.' })
@IsString()
@IsNotEmpty()
sourcePath!: string;
@Field(() => String, { description: 'Destination path on the remote.' })
@IsString()
@IsNotEmpty()
destinationPath!: string;
@Field(() => GraphQLJSON, {
description: 'Additional options for the backup operation, such as --dry-run or --transfers.',
nullable: true,
})
@IsOptional()
@IsObject()
options?: Record<string, unknown>;
}
@ObjectType()
export class BackupStatus {
@Field(() => String, {
description: 'Status message indicating the outcome of the backup initiation.',
})
status!: string;
@Field(() => String, {
description: 'Job ID if available, can be used to check job status.',
nullable: true,
})
jobId?: string;
}
@ObjectType()
export class RCloneWebGuiInfo {
@Field()
url!: string;
}
@ObjectType({
implements: () => Node,
})
export class BackupJobConfig extends Node {
@Field(() => String, { description: 'Human-readable name for this backup job' })
name!: string;
@Field(() => SourceType, { description: 'Type of the backup source' })
sourceType!: SourceType;
@Field(() => DestinationType, { description: 'Type of the backup destination' })
destinationType!: DestinationType;
@Field(() => String, {
description: 'Cron schedule expression (e.g., "0 2 * * *" for daily at 2AM)',
})
schedule!: string;
@Field(() => Boolean, { description: 'Whether this backup job is enabled' })
enabled!: boolean;
@Field(() => SourceConfigUnion, { description: 'Source configuration for this backup job' })
sourceConfig!: typeof SourceConfigUnion;
@Field(() => DestinationConfigUnion, {
description: 'Destination configuration for this backup job',
})
destinationConfig!: typeof DestinationConfigUnion;
@Field(() => DateTimeISOResolver, { description: 'When this config was created' })
createdAt!: string;
@Field(() => DateTimeISOResolver, { description: 'When this config was last updated' })
updatedAt!: string;
@Field(() => DateTimeISOResolver, { description: 'Last time this job ran', nullable: true })
lastRunAt?: string;
@Field(() => String, { description: 'Status of last run', nullable: true })
lastRunStatus?: string;
@Field(() => String, { description: 'Current running job ID for this config', nullable: true })
currentJobId?: string;
}
@InputType()
export class BaseBackupJobConfigInput {
@Field(() => String, { nullable: true })
@IsOptional()
@IsString()
@IsNotEmpty()
name?: string;
@Field(() => String, { nullable: true })
@IsOptional()
@IsString()
@ValidateIf((o) => o.schedule && o.schedule.length > 0)
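// Accepts only simple five-field expressions with a single value or "*" per
// field (e.g. "0 2 * * *"); ranges, lists, and step values like "*/15" are
// rejected by this pattern.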
@Matches(
/^(\*|[0-5]?\d)(\s+(\*|[01]?\d|2[0-3]))(\s+(\*|[1-9]|[12]\d|3[01]))(\s+(\*|[1-9]|1[0-2]))(\s+(\*|[0-6]))$/,
{
message: 'schedule must be a valid cron expression',
}
)
schedule?: string;
@Field(() => Boolean, { nullable: true })
@IsOptional()
@IsBoolean()
enabled?: boolean;
@Field(() => SourceConfigInputUnion, {
description: 'Source configuration for this backup job',
nullable: true,
})
@IsOptional()
@ValidateNested()
sourceConfig?: SourceConfigInput;
@Field(() => DestinationConfigInputUnion, {
description: 'Destination configuration for this backup job',
nullable: true,
})
@IsOptional()
@ValidateNested()
destinationConfig?: DestinationConfigInput;
}
@InputType()
export class CreateBackupJobConfigInput extends BaseBackupJobConfigInput {
@Field(() => String)
@IsString()
@IsNotEmpty()
declare name: string;
@Field(() => Boolean, { defaultValue: true })
@IsBoolean()
declare enabled: boolean;
}
@InputType()
export class UpdateBackupJobConfigInput extends BaseBackupJobConfigInput {
@Field(() => String, { nullable: true })
@IsOptional()
@IsString()
lastRunStatus?: string;
@Field(() => String, { nullable: true })
@IsOptional()
@IsString()
lastRunAt?: string;
@Field(() => String, { nullable: true })
@IsOptional()
@IsString()
currentJobId?: string;
}
@ObjectType()
export class BackupJobConfigForm {
@Field(() => PrefixedID)
id!: string;
@Field(() => GraphQLJSON)
dataSchema!: { properties: DataSlice; type: 'object' };
@Field(() => GraphQLJSON)
uiSchema!: Layout;
}
@InputType()
export class BackupJobConfigFormInput {
@Field(() => Boolean, { defaultValue: false })
@IsOptional()
@IsBoolean()
showAdvanced?: boolean;
}

View File

@@ -0,0 +1,30 @@
import { forwardRef, Module } from '@nestjs/common';
import { ScheduleModule } from '@nestjs/schedule';
import { BackupConfigService } from '@app/unraid-api/graph/resolvers/backup/backup-config.service.js';
import { BackupMutationsResolver } from '@app/unraid-api/graph/resolvers/backup/backup-mutations.resolver.js';
import {
BackupJobConfigResolver,
BackupResolver,
} from '@app/unraid-api/graph/resolvers/backup/backup.resolver.js';
import { BackupDestinationModule } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.module.js';
import { BackupJobStatusResolver } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.resolver.js';
import { BackupJobTrackingService } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-tracking.service.js';
import { BackupOrchestrationService } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-orchestration.service.js';
import { BackupSourceModule } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.module.js';
import { RCloneModule } from '@app/unraid-api/graph/resolvers/rclone/rclone.module.js';
@Module({
imports: [RCloneModule, ScheduleModule.forRoot(), BackupSourceModule, BackupDestinationModule],
providers: [
BackupResolver,
BackupJobConfigResolver,
BackupMutationsResolver,
BackupConfigService,
BackupOrchestrationService,
BackupJobTrackingService,
BackupJobStatusResolver,
],
exports: [forwardRef(() => BackupOrchestrationService), BackupJobTrackingService],
})
export class BackupModule {}

View File

@@ -0,0 +1,131 @@
import { Logger } from '@nestjs/common';
import { Args, Parent, Query, ResolveField, Resolver } from '@nestjs/graphql';
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';
import { BackupConfigService } from '@app/unraid-api/graph/resolvers/backup/backup-config.service.js';
import {
Backup,
BackupJobConfig,
BackupJobConfigForm,
BackupJobConfigFormInput,
BackupStatus,
} from '@app/unraid-api/graph/resolvers/backup/backup.model.js';
import { buildBackupJobConfigSchema } from '@app/unraid-api/graph/resolvers/backup/jsonforms/backup-jsonforms-config.js';
import { JobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import { BackupJobTrackingService } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-tracking.service.js';
import { RCloneService } from '@app/unraid-api/graph/resolvers/rclone/rclone.service.js';
import { FormatService } from '@app/unraid-api/utils/format.service.js';
@Resolver(() => Backup)
export class BackupResolver {
private readonly logger = new Logger(BackupResolver.name);
constructor(
private readonly rcloneService: RCloneService,
private readonly backupConfigService: BackupConfigService,
private readonly formatService: FormatService,
private readonly backupJobTrackingService: BackupJobTrackingService
) {}
@Query(() => Backup, {
description: 'Get backup service information',
})
async backup(): Promise<Backup> {
return {
id: 'backup',
jobs: [],
configs: [],
};
}
@ResolveField(() => [JobStatus], {
description: 'Get all running backup jobs',
})
async jobs(): Promise<JobStatus[]> {
return this.backupJobTrackingService.getAllJobStatuses();
}
@ResolveField(() => [BackupJobConfig], {
description: 'Get all backup job configurations',
})
async configs(): Promise<BackupJobConfig[]> {
return this.backupConfigService.getAllBackupJobConfigs();
}
@Query(() => BackupJobConfig, {
description: 'Get a specific backup job configuration',
nullable: true,
})
async backupJobConfig(
@Args('id', { type: () => PrefixedID }) id: string
): Promise<BackupJobConfig | null> {
return this.backupConfigService.getBackupJobConfig(id);
}
@Query(() => JobStatus, {
description: 'Get status of a specific backup job',
nullable: true,
})
async backupJob(@Args('id', { type: () => PrefixedID }) id: string): Promise<JobStatus | null> {
return this.backupJobTrackingService.getJobStatus(id) || null;
}
@ResolveField(() => BackupStatus, {
description: 'Get the status for the backup service',
})
async status(): Promise<BackupStatus> {
return {
status: 'Available',
jobId: undefined,
};
}
@Query(() => BackupJobConfigForm, {
description: 'Get the JSON schema for backup job configuration form',
})
async backupJobConfigForm(
@Args('input', { nullable: true }) input?: BackupJobConfigFormInput
): Promise<BackupJobConfigForm> {
const remotes = await this.rcloneService.getRemoteDetails();
const { dataSchema, uiSchema } = buildBackupJobConfigSchema({
remotes,
});
return {
id: 'backup-job-config-form',
dataSchema,
uiSchema,
};
}
}
@Resolver(() => BackupJobConfig)
export class BackupJobConfigResolver {
private readonly logger = new Logger(BackupJobConfigResolver.name);
constructor(private readonly backupJobTrackingService: BackupJobTrackingService) {}
@ResolveField(() => JobStatus, {
description: 'Get the current running job for this backup config',
nullable: true,
})
async currentJob(@Parent() config: BackupJobConfig): Promise<JobStatus | null> {
if (!config.currentJobId) {
return null;
}
this.logger.debug(
`Looking for current job for config ${config.id} using currentJobId: ${config.currentJobId}`
);
const jobStatus = this.backupJobTrackingService.getJobStatus(config.currentJobId);
if (!jobStatus) {
this.logger.debug(`No job status found for job ID: ${config.currentJobId}`);
return null;
}
return jobStatus;
}
}

View File

@@ -0,0 +1,32 @@
export const BACKUP_JOB_GROUP_PREFIX = 'backup-';
/**
* Generates the group ID for a backup job based on its configuration ID.
* This group ID is used by RClone to group related backup operations.
* @param configId The ID of the backup job configuration.
* @returns The RClone group ID string.
*/
export function getBackupJobGroupId(configId: string): string {
return `${BACKUP_JOB_GROUP_PREFIX}${configId}`;
}
/**
* Extracts the configuration ID from a backup job group ID.
* @param groupId The RClone group ID string (e.g., "backup-someConfigId").
* @returns The configuration ID if the group ID is valid and prefixed, otherwise undefined.
*/
export function getConfigIdFromGroupId(groupId: string): string | undefined {
if (groupId.startsWith(BACKUP_JOB_GROUP_PREFIX)) {
return groupId.substring(BACKUP_JOB_GROUP_PREFIX.length);
}
return undefined;
}
/**
* Checks if the given ID corresponds to a backup job group.
* @param id The ID string to check (can be a job ID or a group ID).
* @returns True if the ID represents a backup job group, false otherwise.
*/
export function isBackupJobGroup(id: string): boolean {
return id.startsWith(BACKUP_JOB_GROUP_PREFIX);
}
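// Illustrative usage (not part of this module):
//   getBackupJobGroupId('abc123');              // 'backup-abc123'
//   getConfigIdFromGroupId('backup-abc123');    // 'abc123'
//   getConfigIdFromGroupId('job-xyz');          // undefined
//   isBackupJobGroup('backup-abc123');          // true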

View File

@@ -0,0 +1,180 @@
import type { LabelElement, SchemaBasedCondition } from '@jsonforms/core';
import { JsonSchema7, RuleEffect } from '@jsonforms/core';
import type { RCloneRemote } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import type { SettingSlice, UIElement } from '@app/unraid-api/types/json-forms.js';
import { DestinationType } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.types.js';
import { createLabeledControl } from '@app/unraid-api/graph/utils/form-utils.js';
export function getDestinationConfigSlice({ remotes = [] }: { remotes?: RCloneRemote[] }): SettingSlice {
const destinationConfigElements: UIElement[] = [
{
type: 'Control',
scope: '#/properties/destinationConfig/properties/type',
options: {
format: 'radio',
radioLayout: 'horizontal',
options: [
{
label: 'RClone Remote',
value: DestinationType.RCLONE,
description: 'Backup to cloud storage via RClone',
},
],
},
},
// RClone Configuration
{
type: 'VerticalLayout',
rule: {
effect: RuleEffect.SHOW,
condition: {
scope: '#/properties/destinationConfig/properties/type',
schema: { const: DestinationType.RCLONE },
} as SchemaBasedCondition,
},
elements: [
{
type: 'Label',
text: 'RClone Configuration',
options: {
description: 'Configure RClone remote destination settings.',
},
} as LabelElement,
createLabeledControl({
scope: '#/properties/destinationConfig/properties/rcloneConfig/properties/remoteName',
label: 'Remote Configuration',
description: 'Select the RClone remote configuration to use for this backup',
controlOptions: {
suggestions: remotes.map((remote) => ({
value: remote.name,
label: `${remote.name} (${remote.type})`,
})),
},
}),
createLabeledControl({
scope: '#/properties/destinationConfig/properties/rcloneConfig/properties/destinationPath',
label: 'Destination Path',
description:
'The path on the remote where files will be stored (e.g., backups/documents)',
controlOptions: {
placeholder: 'backups/',
format: 'string',
},
}),
createLabeledControl({
scope: '#/properties/destinationConfig/properties/rcloneConfig/properties/rcloneOptions/properties/transfers',
label: 'Number of Transfers',
description: 'Number of file transfers to run in parallel (default: 4)',
controlOptions: {
placeholder: '4',
format: 'number',
},
}),
createLabeledControl({
scope: '#/properties/destinationConfig/properties/rcloneConfig/properties/rcloneOptions/properties/checkers',
label: 'Number of Checkers',
description: 'Number of checkers to run in parallel (default: 8)',
controlOptions: {
placeholder: '8',
format: 'number',
},
}),
],
},
];
const destinationConfigProperties: Record<string, JsonSchema7> = {
destinationConfig: {
type: 'object',
title: 'Destination Configuration',
description: 'Configuration for backup destination',
properties: {
type: {
type: 'string',
title: 'Destination Type',
description: 'Type of destination to use for backup',
enum: [DestinationType.RCLONE],
default: DestinationType.RCLONE,
},
rcloneConfig: {
type: 'object',
title: 'RClone Configuration',
properties: {
remoteName: {
type: 'string',
title: 'Remote Name',
description: 'Remote name from rclone config',
enum:
remotes.length > 0
? remotes.map((remote) => remote.name)
: ['No remotes configured'],
},
destinationPath: {
type: 'string',
title: 'Destination Path',
description: 'Destination path on the remote',
minLength: 1,
},
rcloneOptions: {
type: 'object',
title: 'RClone Options',
description: 'Advanced RClone configuration options',
properties: {
transfers: {
type: 'integer',
title: 'Transfers',
description: 'Number of file transfers to run in parallel',
minimum: 1,
maximum: 100,
default: 4,
},
checkers: {
type: 'integer',
title: 'Checkers',
description: 'Number of checkers to run in parallel',
minimum: 1,
maximum: 100,
default: 8,
},
},
},
},
required: ['remoteName', 'destinationPath'],
},
},
required: ['type'],
},
};
// Apply conditional logic for destinationConfig
if (
destinationConfigProperties.destinationConfig &&
typeof destinationConfigProperties.destinationConfig === 'object'
) {
destinationConfigProperties.destinationConfig.allOf = [
{
if: { properties: { type: { const: DestinationType.RCLONE } }, required: ['type'] },
then: {
required: ['rcloneConfig'],
},
},
];
}
const verticalLayoutElement: UIElement = {
type: 'VerticalLayout',
elements: destinationConfigElements,
options: { step: 2 },
};
return {
properties: destinationConfigProperties,
elements: [verticalLayoutElement],
};
}

View File

@@ -0,0 +1,59 @@
import { Writable } from 'stream';
import { DestinationType } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.types.js';
export interface BackupDestinationConfig {
timeout: number;
cleanupOnFailure: boolean;
useStreaming?: boolean;
supportsStreaming?: boolean;
}
export interface BackupDestinationResult {
success: boolean;
destinationPath?: string;
uploadedBytes?: number;
error?: string;
cleanupRequired?: boolean;
metadata?: Record<string, unknown>;
}
export interface StreamingDestinationHandle {
stream: Writable;
completionPromise: Promise<BackupDestinationResult>;
}
export interface BackupDestinationProcessorOptions {
jobId?: string;
onProgress?: (progress: number) => void;
onOutput?: (data: string) => void;
onError?: (error: string) => void;
}
export abstract class BackupDestinationProcessor<TConfig extends BackupDestinationConfig> {
abstract readonly destinationType: DestinationType;
abstract execute(
sourcePath: string,
config: TConfig,
options?: BackupDestinationProcessorOptions
): Promise<BackupDestinationResult>;
abstract validate(config: TConfig): Promise<{ valid: boolean; error?: string; warnings?: string[] }>;
abstract cleanup(result: BackupDestinationResult): Promise<void>;
// Getter to check if processor supports streaming
abstract get supportsStreaming(): boolean;
// Optional getter to get a writable stream for streaming backups
get getWritableStream():
| ((
config: TConfig,
jobId: string,
options?: BackupDestinationProcessorOptions
) => Promise<StreamingDestinationHandle>)
| undefined {
return undefined;
}
}
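// Minimal sketch of consuming a streaming destination (illustrative; assumes a
// concrete processor such as the RClone one below and an existing Readable
// `sourceReadable`):
//
//   const open = processor.getWritableStream;
//   if (processor.supportsStreaming && open) {
//       const { stream, completionPromise } = await open(config, jobId, options);
//       sourceReadable.pipe(stream);
//       const result = await completionPromise; // BackupDestinationResult
//   }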

View File

@@ -0,0 +1,13 @@
import { forwardRef, Module } from '@nestjs/common';
import { BackupDestinationService } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.service.js';
import { RCloneDestinationProcessor } from '@app/unraid-api/graph/resolvers/backup/destination/rclone/rclone-destination-processor.service.js';
import { RCloneApiService } from '@app/unraid-api/graph/resolvers/rclone/rclone-api.service.js';
import { RCloneModule } from '@app/unraid-api/graph/resolvers/rclone/rclone.module.js';
@Module({
imports: [forwardRef(() => RCloneModule)],
providers: [RCloneApiService, BackupDestinationService, RCloneDestinationProcessor],
exports: [BackupDestinationService, RCloneDestinationProcessor],
})
export class BackupDestinationModule {}

View File

@@ -0,0 +1,85 @@
import { BadRequestException, Injectable, Logger } from '@nestjs/common';
import { EventEmitter } from 'events';
import { v4 as uuidv4 } from 'uuid';
import {
BackupDestinationConfig,
BackupDestinationProcessor,
BackupDestinationProcessorOptions,
BackupDestinationResult,
} from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination-processor.interface.js';
import { DestinationType } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.types.js';
import { RCloneDestinationProcessor } from '@app/unraid-api/graph/resolvers/backup/destination/rclone/rclone-destination-processor.service.js';
export interface BackupDestinationOptions {
jobId?: string;
onProgress?: (progress: number) => void;
onOutput?: (data: string) => void;
onError?: (error: string) => void;
}
@Injectable()
export class BackupDestinationService extends EventEmitter {
private readonly logger = new Logger(BackupDestinationService.name);
constructor(private readonly rcloneDestinationProcessor: RCloneDestinationProcessor) {
super();
}
async processDestination<T extends BackupDestinationConfig & { type: DestinationType }>(
sourcePath: string,
config: T,
options?: BackupDestinationOptions
): Promise<BackupDestinationResult> {
const processor = this.getProcessor(config.type);
if (!processor) {
throw new BadRequestException(`Unsupported destination type: ${config.type}`);
}
const processorOptions: BackupDestinationProcessorOptions = {
jobId: options?.jobId || uuidv4(),
onProgress: options?.onProgress,
onOutput: options?.onOutput,
onError: options?.onError,
};
try {
const result = await processor.execute(sourcePath, config, processorOptions);
this.logger.log(`Destination processing completed for type: ${config.type}`);
return result;
} catch (error) {
this.logger.error(`Destination processing failed for type: ${config.type}`, error);
throw error;
}
}
async cancelDestinationJob(jobId: string): Promise<boolean> {
this.logger.log(`Attempting to cancel destination job: ${jobId}`);
try {
// Delegate to the processor's cleanup path, which stops the underlying
// RClone job/group by ID; re-running execute() here would start a new
// transfer rather than cancel an existing one.
await this.rcloneDestinationProcessor.cleanup({
success: false,
cleanupRequired: true,
destinationPath: jobId,
metadata: { jobId },
});
this.logger.log(`Cancelled destination job: ${jobId}`);
return true;
} catch (error) {
this.logger.warn(`Failed to cancel destination job ${jobId}:`, error);
}
return false;
}
async cleanup(): Promise<void> {
this.logger.log('Cleaning up destination service...');
}
public getProcessor(type: DestinationType): BackupDestinationProcessor<any> | null {
switch (type) {
case DestinationType.RCLONE:
return this.rcloneDestinationProcessor;
default:
return null;
}
}
}
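// Illustrative usage (parameter values are examples only):
//   const result = await destinationService.processDestination(
//       '/mnt/user/appdata',
//       {
//           type: DestinationType.RCLONE,
//           remoteName: 'gdrive',
//           destinationPath: 'backups/appdata',
//           timeout: 3600000,
//           cleanupOnFailure: true,
//       },
//       { onProgress: (p) => console.log(`${p}%`) }
//   );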

View File

@@ -0,0 +1,95 @@
import { createUnionType, Field, InputType, ObjectType, registerEnumType } from '@nestjs/graphql';
import { Type } from 'class-transformer';
import { IsEnum, IsNotEmpty, IsObject, IsOptional, IsString, ValidateNested } from 'class-validator';
import { GraphQLJSON } from 'graphql-scalars';
import { BackupJobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
export enum DestinationType {
RCLONE = 'RCLONE',
}
registerEnumType(DestinationType, {
name: 'DestinationType',
});
export interface StreamingJobInfo {
jobId: string;
status: BackupJobStatus;
progress?: number;
startTime: Date;
endTime?: Date;
error?: string;
}
@ObjectType()
export class RcloneDestinationConfig {
@Field(() => String)
type!: 'RCLONE';
@Field(() => String, { description: 'Remote name from rclone config' })
remoteName!: string;
@Field(() => String, { description: 'Destination path on the remote' })
destinationPath!: string;
@Field(() => GraphQLJSON, {
description: 'RClone options (e.g., --transfers, --checkers)',
nullable: true,
})
rcloneOptions?: Record<string, unknown>;
static isTypeOf(obj: any): obj is RcloneDestinationConfig {
return (
obj &&
obj.type === 'RCLONE' &&
typeof obj.remoteName === 'string' &&
typeof obj.destinationPath === 'string'
);
}
}
@InputType()
export class RcloneDestinationConfigInput {
@Field(() => String)
@IsString()
@IsNotEmpty()
remoteName!: string;
@Field(() => String)
@IsString()
@IsNotEmpty()
destinationPath!: string;
@Field(() => GraphQLJSON, { nullable: true })
@IsOptional()
@IsObject()
rcloneOptions?: Record<string, unknown>;
}
@InputType()
export class DestinationConfigInput {
@Field(() => DestinationType, { nullable: false })
@IsEnum(DestinationType, { message: 'Invalid destination type' })
type!: DestinationType;
@Field(() => RcloneDestinationConfigInput, { nullable: true })
@IsOptional()
@ValidateNested()
@Type(() => RcloneDestinationConfigInput)
rcloneConfig?: RcloneDestinationConfigInput;
}
export const DestinationConfigUnion = createUnionType({
name: 'DestinationConfigUnion',
types: () => [RcloneDestinationConfig] as const,
resolveType(obj: any) {
if (RcloneDestinationConfig.isTypeOf && RcloneDestinationConfig.isTypeOf(obj)) {
return RcloneDestinationConfig;
}
return null;
},
});
export const DestinationConfigInputUnion = DestinationConfigInput;

View File

@@ -0,0 +1,357 @@
import { Injectable, Logger } from '@nestjs/common';
import { execa } from 'execa';
import { getBackupJobGroupId } from '@app/unraid-api/graph/resolvers/backup/backup.utils.js';
import {
BackupDestinationConfig,
BackupDestinationProcessor,
BackupDestinationProcessorOptions,
BackupDestinationResult,
StreamingDestinationHandle,
} from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination-processor.interface.js';
import { DestinationType } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.types.js';
import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
import { RCloneApiService } from '@app/unraid-api/graph/resolvers/rclone/rclone-api.service.js';
export interface RCloneDestinationConfig extends BackupDestinationConfig {
remoteName: string;
destinationPath: string;
transferOptions?: Record<string, unknown>;
useStreaming?: boolean;
sourceCommand?: string;
sourceArgs?: string[];
sourceType?: SourceType;
}
@Injectable()
export class RCloneDestinationProcessor extends BackupDestinationProcessor<RCloneDestinationConfig> {
readonly destinationType = DestinationType.RCLONE;
private readonly logger = new Logger(RCloneDestinationProcessor.name);
constructor(private readonly rcloneApiService: RCloneApiService) {
super();
}
async execute(
sourcePath: string,
config: RCloneDestinationConfig,
options: BackupDestinationProcessorOptions = {}
): Promise<BackupDestinationResult> {
const { jobId = 'unknown', onProgress, onOutput, onError } = options;
try {
this.logger.log(
`Starting RClone upload job ${jobId} from ${sourcePath} to ${config.remoteName}:${config.destinationPath}`
);
return await this.executeRegularBackup(sourcePath, config, options);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown RClone error';
this.logger.error(`RClone upload job ${jobId} failed: ${errorMessage}`, error);
if (onError) {
onError(errorMessage);
}
return {
success: false,
error: errorMessage,
cleanupRequired: config.cleanupOnFailure,
};
}
}
private async executeRegularBackup(
sourcePath: string,
config: RCloneDestinationConfig,
options: BackupDestinationProcessorOptions
): Promise<BackupDestinationResult> {
const { jobId: backupConfigId, onOutput, onProgress, onError } = options;
if (!backupConfigId) {
const errorMsg = 'Backup Configuration ID (jobId) is required to start RClone backup.';
this.logger.error(errorMsg);
if (onError) {
onError(errorMsg);
}
return {
success: false,
error: errorMsg,
cleanupRequired: config.cleanupOnFailure,
};
}
await this.rcloneApiService.startBackup({
srcPath: sourcePath,
dstPath: `${config.remoteName}:${config.destinationPath}`,
async: true,
configId: backupConfigId,
options: config.transferOptions,
});
const groupIdToMonitor = getBackupJobGroupId(backupConfigId);
if (onOutput) {
onOutput(
`RClone backup process initiated for group: ${groupIdToMonitor}. Monitoring progress...`
);
}
let jobStatus = await this.rcloneApiService.getEnhancedJobStatus(
groupIdToMonitor,
backupConfigId
);
this.logger.debug('Rclone Job Status: %o', jobStatus);
let retries = 0;
// Fall back to a 1-hour ceiling when no sane (>= 60s) timeout is configured;
// the job is polled every 5 seconds up to this limit.
const effectiveTimeout = config.timeout && config.timeout >= 60000 ? config.timeout : 3600000;
const maxRetries = Math.floor(effectiveTimeout / 5000);
while (jobStatus && !jobStatus.finished && retries < maxRetries) {
await new Promise((resolve) => setTimeout(resolve, 5000));
try {
jobStatus = await this.rcloneApiService.getEnhancedJobStatus(
groupIdToMonitor,
backupConfigId
);
if (jobStatus && onProgress && jobStatus.progressPercentage !== undefined) {
onProgress(jobStatus.progressPercentage);
}
if (jobStatus && onOutput && jobStatus.stats?.speed) {
onOutput(`Group ${groupIdToMonitor} - Transfer speed: ${jobStatus.stats.speed} B/s`);
}
} catch (pollError) {
this.logger.warn(
`[${backupConfigId}] Error polling group status for ${groupIdToMonitor}: ${(pollError as Error).message}`
);
}
retries++;
}
if (!jobStatus) {
const errorMsg = `Failed to get final job status for RClone group ${groupIdToMonitor}`;
this.logger.error(`[${backupConfigId}] ${errorMsg}`);
if (onError) {
onError(errorMsg);
}
return {
success: false,
error: errorMsg,
destinationPath: `${config.remoteName}:${config.destinationPath}`,
cleanupRequired: config.cleanupOnFailure,
};
}
if (jobStatus.finished && jobStatus.success) {
if (onProgress) {
onProgress(100);
}
if (onOutput) {
onOutput(`RClone backup for group ${groupIdToMonitor} completed successfully.`);
}
return {
success: true,
destinationPath: `${config.remoteName}:${config.destinationPath}`,
metadata: {
groupId: groupIdToMonitor,
remoteName: config.remoteName,
remotePath: config.destinationPath,
transferOptions: config.transferOptions,
stats: jobStatus.stats,
},
};
} else {
let errorMsg: string;
if (!jobStatus.finished && retries >= maxRetries) {
errorMsg = `RClone group ${groupIdToMonitor} timed out after ${effectiveTimeout / 1000} seconds.`;
this.logger.error(`[${backupConfigId}] ${errorMsg}`);
} else {
errorMsg = jobStatus.error || `RClone group ${groupIdToMonitor} failed.`;
this.logger.error(`[${backupConfigId}] ${errorMsg}`, jobStatus.stats?.lastError);
}
if (onError) {
onError(errorMsg);
}
return {
success: false,
error: errorMsg,
destinationPath: `${config.remoteName}:${config.destinationPath}`,
metadata: {
groupId: groupIdToMonitor,
remoteName: config.remoteName,
remotePath: config.destinationPath,
transferOptions: config.transferOptions,
stats: jobStatus.stats,
},
cleanupRequired: config.cleanupOnFailure,
};
}
}
async validate(
config: RCloneDestinationConfig
): Promise<{ valid: boolean; error?: string; warnings?: string[] }> {
const warnings: string[] = [];
if (!config.remoteName) {
return { valid: false, error: 'Remote name is required' };
}
if (!config.destinationPath) {
return { valid: false, error: 'Remote path is required' };
}
if (config.useStreaming) {
if (!config.sourceCommand) {
return { valid: false, error: 'Source command is required for streaming backups' };
}
if (!config.sourceArgs || config.sourceArgs.length === 0) {
return { valid: false, error: 'Source arguments are required for streaming backups' };
}
}
try {
const remotes = await this.rcloneApiService.listRemotes();
if (!remotes.includes(config.remoteName)) {
return { valid: false, error: `Remote '${config.remoteName}' not found` };
}
} catch (error) {
return { valid: false, error: 'Failed to validate remote configuration' };
}
if (config.timeout < 60000) {
warnings.push('Timeout is less than 1 minute, which may be too short for large uploads');
}
return { valid: true, warnings };
}
async cleanup(result: BackupDestinationResult): Promise<void> {
if (!result.cleanupRequired || !result.destinationPath) {
return;
}
const idToStop = result.metadata?.groupId || result.metadata?.jobId;
try {
this.logger.log(`Cleaning up failed upload at ${result.destinationPath}`);
if (idToStop) {
await this.rcloneApiService.stopJob(idToStop as string);
if (result.metadata?.groupId) {
this.logger.log(`Stopped RClone group: ${result.metadata.groupId}`);
} else if (result.metadata?.jobId) {
this.logger.log(
`Attempted to stop RClone job: ${result.metadata.jobId} (Note: Group ID preferred for cleanup)`
);
}
}
} catch (error) {
this.logger.warn(
`Failed to cleanup destination: ${error instanceof Error ? error.message : 'Unknown error'}`
);
}
}
get supportsStreaming(): boolean {
return true;
}
get getWritableStream(): (
config: RCloneDestinationConfig,
jobId: string,
options?: BackupDestinationProcessorOptions
) => Promise<StreamingDestinationHandle> {
return async (
config: RCloneDestinationConfig,
jobId: string,
options: BackupDestinationProcessorOptions = {}
): Promise<StreamingDestinationHandle> => {
const validation = await this.validate(config);
if (!validation.valid) {
const errorMsg = `RClone destination configuration validation failed: ${validation.error}`;
this.logger.error(`[${jobId}] ${errorMsg}`);
throw new Error(errorMsg);
}
const rcloneDest = `${config.remoteName}:${config.destinationPath}`;
const rcloneArgs = ['rcat', rcloneDest, '--progress'];
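// `rclone rcat` uploads data read from stdin to the remote object at
// rcloneDest; with --progress, rclone emits percentage lines that the
// stderr handler below parses to drive onProgress callbacks.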
this.logger.log(
`[${jobId}] Preparing writable stream for rclone rcat to ${rcloneDest} with progress`
);
try {
const rcloneProcess = execa('rclone', rcloneArgs, {});
const completionPromise = new Promise<BackupDestinationResult>((resolve, reject) => {
let stderrOutput = '';
let stdoutOutput = '';
rcloneProcess.stderr?.on('data', (data) => {
const chunk = data.toString();
stderrOutput += chunk;
this.logger.verbose(`[${jobId}] rclone rcat stderr: ${chunk.trim()}`);
const progressMatch = chunk.match(/(\d+)%/);
if (progressMatch && progressMatch[1] && options.onProgress) {
const percentage = parseInt(progressMatch[1], 10);
if (!isNaN(percentage)) {
options.onProgress(percentage);
}
}
});
rcloneProcess.stdout?.on('data', (data) => {
const chunk = data.toString();
stdoutOutput += chunk;
this.logger.verbose(`[${jobId}] rclone rcat stdout: ${chunk.trim()}`);
});
rcloneProcess
.then((result) => {
this.logger.log(
`[${jobId}] rclone rcat to ${rcloneDest} completed successfully.`
);
resolve({
success: true,
destinationPath: rcloneDest,
metadata: { stdout: stdoutOutput, stderr: stderrOutput },
});
})
.catch((error) => {
const errorMessage =
error.stderr || error.message || 'rclone rcat command failed';
this.logger.error(
`[${jobId}] rclone rcat to ${rcloneDest} failed: ${errorMessage}`,
error.stack
);
reject({
success: false,
error: errorMessage,
destinationPath: rcloneDest,
metadata: { stdout: stdoutOutput, stderr: stderrOutput },
});
});
});
if (!rcloneProcess.stdin) {
const errMsg = 'Failed to get stdin stream from rclone process.';
this.logger.error(`[${jobId}] ${errMsg}`);
throw new Error(errMsg);
}
return {
stream: rcloneProcess.stdin,
completionPromise,
};
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(`[${jobId}] Failed to start rclone rcat process: ${errorMessage}`);
throw new Error(`Failed to start rclone rcat process: ${errorMessage}`);
}
};
}
}

View File

@@ -0,0 +1,189 @@
import type { LabelElement, Layout, SchemaBasedCondition } from '@jsonforms/core';
import { JsonSchema7, RuleEffect } from '@jsonforms/core';
import type { RCloneRemote } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import type { DataSlice, SettingSlice, UIElement } from '@app/unraid-api/types/json-forms.js';
import { getDestinationConfigSlice } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination-jsonforms.config.js';
import { getSourceConfigSlice } from '@app/unraid-api/graph/resolvers/backup/source/backup-source-jsonforms.config.js';
import { createLabeledControl } from '@app/unraid-api/graph/utils/form-utils.js';
import { mergeSettingSlices } from '@app/unraid-api/types/json-forms.js';
function getBasicBackupConfigSlice(): SettingSlice {
const basicConfigElements: UIElement[] = [
createLabeledControl({
scope: '#/properties/name',
label: 'Backup Job Name',
description: 'A descriptive name for this backup job (e.g., "Weekly Documents Backup")',
controlOptions: {
placeholder: 'Enter backup job name',
format: 'string',
},
}),
createLabeledControl({
scope: '#/properties/schedule',
label: 'Schedule (Cron Expression)',
description:
'When to run this backup job. Leave empty for manual execution only. Examples: "0 2 * * *" (daily at 2AM), "0 2 * * 0" (weekly on Sunday at 2AM)',
controlOptions: {
placeholder: 'Leave empty for manual backup',
format: 'string',
suggestions: [
{
value: '',
label: 'Manual Only',
tooltip: 'No automatic schedule - run manually only',
},
{
value: '0 2 * * *',
label: 'Daily at 2:00 AM',
tooltip: 'Runs every day at 2:00 AM',
},
{
value: '0 2 * * 0',
label: 'Weekly (Sunday 2:00 AM)',
tooltip: 'Runs every Sunday at 2:00 AM',
},
{
value: '0 9 * * 1',
label: 'Mondays at 9:00 AM',
tooltip: 'Runs every Monday at 9:00 AM',
},
{
value: '0 0 1 * *',
label: 'Monthly (1st day at midnight)',
tooltip: 'Runs on the 1st day of every month at midnight',
},
{
value: '0 2 1 * *',
label: 'Monthly (1st at 2:00 AM)',
tooltip: 'Runs on the 1st of every month at 2:00 AM',
},
{
value: '0 2 * * 1-5',
label: 'Weekdays at 2:00 AM',
tooltip: 'Runs Monday through Friday at 2:00 AM',
},
],
},
}),
createLabeledControl({
scope: '#/properties/enabled',
label: 'Enable Backup Job',
description: 'Whether this backup job should run automatically according to the schedule',
controlOptions: {
toggle: true,
},
rule: {
effect: RuleEffect.SHOW,
condition: {
scope: '#/properties/schedule',
schema: {
type: 'string',
minLength: 1,
},
} as SchemaBasedCondition,
},
}),
];
const basicConfigProperties: Record<string, JsonSchema7> = {
name: {
type: 'string',
title: 'Backup Job Name',
description: 'Human-readable name for this backup job',
minLength: 1,
maxLength: 100,
},
schedule: {
type: 'string',
title: 'Cron Schedule',
description: 'Cron schedule expression (empty for manual execution)',
},
enabled: {
type: 'boolean',
title: 'Enabled',
description: 'Whether this backup job is enabled',
default: true,
},
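// Hidden state presumably consumed by the SteppedLayout element (see
// buildBackupJobConfigSchema below) to track the active wizard step.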
configStep: {
type: 'object',
properties: {
current: { type: 'integer', default: 0 },
total: { type: 'integer', default: 3 },
},
default: { current: 0, total: 3 },
},
};
const verticalLayoutElement: UIElement = {
type: 'VerticalLayout',
elements: basicConfigElements,
options: { step: 0 },
};
return {
properties: basicConfigProperties as unknown as DataSlice,
elements: [verticalLayoutElement],
};
}
export function buildBackupJobConfigSchema({ remotes = [] }: { remotes?: RCloneRemote[] }): {
dataSchema: { properties: DataSlice; type: 'object' };
uiSchema: Layout;
} {
const slicesToMerge: SettingSlice[] = [];
const basicSlice = getBasicBackupConfigSlice();
slicesToMerge.push(basicSlice);
const sourceSlice = getSourceConfigSlice();
slicesToMerge.push(sourceSlice);
const destinationSlice = getDestinationConfigSlice({ remotes });
slicesToMerge.push(destinationSlice);
const mergedSlices = mergeSettingSlices(slicesToMerge);
const dataSchema: { properties: DataSlice; type: 'object' } = {
type: 'object',
properties: mergedSlices.properties,
};
const steps = [
{ label: 'Backup Configuration', description: 'Basic backup job settings' },
{ label: 'Source Configuration', description: 'Configure what to backup' },
{ label: 'Destination Configuration', description: 'Configure where to backup' },
];
const step0Elements = basicSlice.elements;
const step1Elements = sourceSlice.elements;
const step2Elements = destinationSlice.elements;
const steppedLayoutElement: UIElement = {
type: 'SteppedLayout',
options: {
steps: steps,
},
elements: [...(step0Elements || []), ...(step1Elements || []), ...(step2Elements || [])].filter(
(el) => el
) as UIElement[],
};
const titleLabel: UIElement = {
type: 'Label',
text: 'Create Backup Job',
options: {
format: 'title',
description: 'Configure a new scheduled backup job with RClone.',
},
};
const uiSchema: Layout = {
type: 'VerticalLayout',
elements: [titleLabel, steppedLayoutElement],
};
return { dataSchema, uiSchema };
}
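// Illustrative usage (mirrors the backupJobConfigForm resolver):
//   const remotes = await rcloneService.getRemoteDetails();
//   const { dataSchema, uiSchema } = buildBackupJobConfigSchema({ remotes });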

View File

@@ -0,0 +1,76 @@
import { Field, GraphQLISODateTime, Int, ObjectType, registerEnumType } from '@nestjs/graphql';
import { Node } from '@unraid/shared/graphql.model';
// Moved BackupJobStatus enum here
export enum BackupJobStatus {
QUEUED = 'Queued',
RUNNING = 'Running',
COMPLETED = 'Completed',
FAILED = 'Failed',
CANCELLED = 'Cancelled',
}
registerEnumType(BackupJobStatus, {
name: 'BackupJobStatus',
description: 'Status of a backup job',
});
@ObjectType({
implements: () => Node,
})
export class JobStatus extends Node {
@Field(() => String, { description: 'External job ID from the job execution system' })
externalJobId!: string;
@Field()
name!: string;
@Field(() => BackupJobStatus)
status!: BackupJobStatus;
@Field(() => Int, { description: 'Progress percentage (0-100)' })
progress!: number;
@Field({ nullable: true })
message?: string;
@Field({ nullable: true })
error?: string;
@Field(() => GraphQLISODateTime)
startTime!: Date;
@Field(() => GraphQLISODateTime, { nullable: true })
endTime?: Date;
@Field(() => Int, { nullable: true, description: 'Bytes transferred' })
bytesTransferred?: number;
@Field(() => Int, { nullable: true, description: 'Total bytes to transfer' })
totalBytes?: number;
@Field(() => Int, { nullable: true, description: 'Transfer speed in bytes per second' })
speed?: number;
@Field(() => Int, { nullable: true, description: 'Elapsed time in seconds' })
elapsedTime?: number;
@Field(() => Int, { nullable: true, description: 'Estimated time to completion in seconds' })
eta?: number;
@Field(() => String, { nullable: true, description: 'Human-readable bytes transferred' })
formattedBytesTransferred?: string;
@Field(() => String, { nullable: true, description: 'Human-readable transfer speed' })
formattedSpeed?: string;
@Field(() => String, { nullable: true, description: 'Human-readable elapsed time' })
formattedElapsedTime?: string;
@Field(() => String, { nullable: true, description: 'Human-readable ETA' })
formattedEta?: string;
}
// Use JobStatus as the unified type for both GraphQL and TypeScript
export type JobStatusInfo = JobStatus;

View File

@@ -0,0 +1,30 @@
import { Args, Query, Resolver } from '@nestjs/graphql';
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';
import { JobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import { BackupJobTrackingService } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-tracking.service.js';
@Resolver(() => JobStatus)
export class BackupJobStatusResolver {
constructor(private readonly jobTrackingService: BackupJobTrackingService) {}
@Query(() => JobStatus, { name: 'backupJobStatus', nullable: true })
async getJobStatus(
@Args('jobId', { type: () => PrefixedID }) jobId: string
): Promise<JobStatus | null> {
const jobStatus = this.jobTrackingService.getJobStatus(jobId);
if (!jobStatus) {
// Optionally throw NotFoundException or return null based on desired API behavior
// throw new NotFoundException(`Job with ID ${jobId} not found.`);
return null;
}
return jobStatus; // JobStatusInfo is a direct alias of JobStatus, so no mapping is needed
}
@Query(() => [JobStatus], { name: 'allBackupJobStatuses' })
async getAllJobStatuses(): Promise<JobStatus[]> {
const statuses = this.jobTrackingService.getAllJobStatuses();
return statuses; // JobStatusInfo[] is already JobStatus[]
}
}

View File

@@ -0,0 +1,119 @@
import { Injectable, Logger } from '@nestjs/common';
import {
BackupJobStatus,
JobStatus,
} from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
@Injectable()
export class BackupJobTrackingService {
private readonly logger = new Logger(BackupJobTrackingService.name);
private activeJobs: Map<string, JobStatus> = new Map(); // Maps internal ID -> JobStatus
private externalJobIndex: Map<string, string> = new Map(); // Maps external ID -> internal ID
constructor() {
// Potentially load persisted jobs if needed
}
initializeJob(externalJobId: string, jobName: string): JobStatus {
// Check if external job already exists
const existingInternalId = this.externalJobIndex.get(externalJobId);
if (existingInternalId && this.activeJobs.has(existingInternalId)) {
this.logger.warn(`Job with external ID ${externalJobId} is already initialized.`);
return this.activeJobs.get(existingInternalId)!;
}
const internalId = `job_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
const newJobStatus: JobStatus = {
id: internalId,
externalJobId,
name: jobName,
status: BackupJobStatus.QUEUED,
progress: 0,
startTime: new Date(),
message: 'Job initialized.',
};
this.activeJobs.set(internalId, newJobStatus);
this.externalJobIndex.set(externalJobId, internalId);
this.logger.log(
`Job initialized: ${jobName} (Internal ID: ${internalId}, External ID: ${externalJobId})`
);
return newJobStatus;
}
updateJobStatus(
internalId: string,
updates: Partial<Omit<JobStatus, 'externalJobId' | 'startTime' | 'name' | 'id'>>
): JobStatus | null {
const job = this.activeJobs.get(internalId);
if (!job) {
this.logger.warn(`Cannot update status for unknown internal job ID: ${internalId}`);
return null;
}
const updatedJob = { ...job, ...updates };
if (
updates.status === BackupJobStatus.COMPLETED ||
updates.status === BackupJobStatus.FAILED ||
updates.status === BackupJobStatus.CANCELLED
) {
updatedJob.endTime = new Date();
updatedJob.progress = updates.status === BackupJobStatus.COMPLETED ? 100 : job.progress;
}
if (updatedJob.progress > 100) {
updatedJob.progress = 100;
}
this.activeJobs.set(internalId, updatedJob);
this.logger.log(
`Job status updated for ${job.name} (Internal ID: ${internalId}): Status: ${updatedJob.status}, Progress: ${updatedJob.progress}%`
);
return updatedJob;
}
// For external systems (like RClone) to update job status
updateJobStatusByExternalId(
externalJobId: string,
updates: Partial<Omit<JobStatus, 'externalJobId' | 'startTime' | 'name' | 'id'>>
): JobStatus | null {
const internalId = this.externalJobIndex.get(externalJobId);
if (!internalId) {
this.logger.warn(`Cannot find internal job for external ID: ${externalJobId}`);
return null;
}
return this.updateJobStatus(internalId, updates);
}
getJobStatus(internalId: string): JobStatus | undefined {
return this.activeJobs.get(internalId);
}
getJobStatusByExternalId(externalJobId: string): JobStatus | undefined {
const internalId = this.externalJobIndex.get(externalJobId);
return internalId ? this.activeJobs.get(internalId) : undefined;
}
getAllJobStatuses(): JobStatus[] {
return Array.from(this.activeJobs.values());
}
clearJob(internalId: string): boolean {
const job = this.activeJobs.get(internalId);
if (job) {
this.externalJobIndex.delete(job.externalJobId);
}
return this.activeJobs.delete(internalId);
}
clearJobByExternalId(externalJobId: string): boolean {
const internalId = this.externalJobIndex.get(externalJobId);
if (internalId) {
this.externalJobIndex.delete(externalJobId);
return this.activeJobs.delete(internalId);
}
return false;
}
}
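
Note: the tracking service has no constructor dependencies, so its lifecycle is easy to illustrate directly (a sketch outside Nest DI; the IDs are illustrative).

import { BackupJobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import { BackupJobTrackingService } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-tracking.service.js';

const tracking = new BackupJobTrackingService();

// An external ID (e.g. an RClone job ID) is mapped to a freshly generated internal ID.
const job = tracking.initializeJob('rclone-42', 'Nightly flash backup');

// External systems report against the external ID...
tracking.updateJobStatusByExternalId('rclone-42', { progress: 50, message: 'Halfway there' });

// ...internal callers use the internal ID. Terminal states stamp endTime,
// and COMPLETED forces progress to 100.
tracking.updateJobStatus(job.id, { status: BackupJobStatus.COMPLETED });
console.log(tracking.getJobStatus(job.id)?.progress); // 100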

View File

@@ -0,0 +1,534 @@
import { forwardRef, Inject, Injectable, Logger } from '@nestjs/common';
import { Readable } from 'stream';
import { pipeline } from 'stream/promises'; // Using stream.pipeline for better error handling
import { BackupConfigService } from '@app/unraid-api/graph/resolvers/backup/backup-config.service.js';
import { BackupJobConfig } from '@app/unraid-api/graph/resolvers/backup/backup.model.js';
import {
BackupDestinationProcessor,
BackupDestinationProcessorOptions,
BackupDestinationResult,
StreamingDestinationHandle, // Assuming this will be defined in the interface file
} from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination-processor.interface.js';
import { BackupDestinationService } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.service.js';
import {
BackupJobStatus,
JobStatus,
} from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import { BackupJobTrackingService } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-tracking.service.js';
import {
BackupSourceProcessor,
BackupSourceProcessorOptions,
BackupSourceResult,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source-processor.interface.js';
import { BackupSourceService } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.service.js';
@Injectable()
export class BackupOrchestrationService {
private readonly logger = new Logger(BackupOrchestrationService.name);
constructor(
private readonly jobTrackingService: BackupJobTrackingService,
private readonly backupSourceService: BackupSourceService,
private readonly backupDestinationService: BackupDestinationService,
@Inject(forwardRef(() => BackupConfigService))
private readonly backupConfigService: BackupConfigService
) {}
async executeBackupJob(jobConfig: BackupJobConfig, configId: string): Promise<string> {
this.logger.log(
`Starting orchestration for backup job: ${jobConfig.name} (Config ID: ${configId})`
);
// Initialize job in tracking service and get the internal tracking object
// configId (original jobConfig.id) is used to link tracking to config, jobConfig.name is for display
const jobStatus = this.jobTrackingService.initializeJob(configId, jobConfig.name);
const internalJobId = jobStatus.id; // This is the actual ID for this specific job run
// DO NOT call backupConfigService.updateBackupJobConfig here for currentJobId
// This will be handled by BackupConfigService itself using the returned internalJobId
this.emitJobStatus(internalJobId, {
status: BackupJobStatus.RUNNING,
progress: 0,
message: 'Job initializing...',
});
const sourceProcessor = this.backupSourceService.getProcessor(jobConfig.sourceType);
const destinationProcessor = this.backupDestinationService.getProcessor(
jobConfig.destinationType
);
if (!sourceProcessor || !destinationProcessor) {
const errorMsg = 'Failed to initialize backup processors.';
this.logger.error(`[Config ID: ${configId}, Job ID: ${internalJobId}] ${errorMsg}`);
this.emitJobStatus(internalJobId, {
status: BackupJobStatus.FAILED,
error: errorMsg,
});
// Call handleJobCompletion before throwing
await this.backupConfigService.handleJobCompletion(
configId,
BackupJobStatus.FAILED,
internalJobId
);
throw new Error(errorMsg);
}
try {
if (sourceProcessor.supportsStreaming && destinationProcessor.supportsStreaming) {
await this.executeStreamingBackup(
sourceProcessor,
destinationProcessor,
jobConfig,
internalJobId
);
} else {
await this.executeRegularBackup(
sourceProcessor,
destinationProcessor,
jobConfig,
internalJobId,
configId // Pass configId for handleJobCompletion
);
}
// If executeStreamingBackup/executeRegularBackup complete without throwing, it implies success for those stages.
// The final status (COMPLETED/FAILED) is set within those methods via emitJobStatus and then handleJobCompletion.
} catch (error) {
// Errors from executeStreamingBackup/executeRegularBackup should have already called handleJobCompletion.
// This catch is a fallback.
this.logger.error(
`[Config ID: ${configId}, Job ID: ${internalJobId}] Orchestration error after backup execution attempt: ${(error as Error).message}`
);
// Ensure completion is handled if not already done by the execution methods
// This might be redundant if execution methods are guaranteed to call it.
// However, direct throws before or after calling those methods would be caught here.
await this.backupConfigService.handleJobCompletion(
configId,
BackupJobStatus.FAILED,
internalJobId
);
throw error; // Re-throw the error
}
// DO NOT clear currentJobId here using updateBackupJobConfig. It's handled by handleJobCompletion.
this.logger.log(
`Finished orchestration logic for backup job: ${jobConfig.name} (Config ID: ${configId}, Job ID: ${internalJobId})`
);
return internalJobId; // Return the actual job ID for this run
}
private async executeStreamingBackup(
sourceProcessor: BackupSourceProcessor<any>,
destinationProcessor: BackupDestinationProcessor<any>,
jobConfig: BackupJobConfig, // This is the config object, not its ID
internalJobId: string
): Promise<void> {
this.logger.log(
`Executing STREAMING backup for job: ${jobConfig.name} (Internal Job ID: ${internalJobId})`
);
this.emitJobStatus(internalJobId, {
status: BackupJobStatus.RUNNING,
progress: 0,
message: 'Starting streaming backup...',
});
if (!sourceProcessor.getReadableStream || !destinationProcessor.getWritableStream) {
const errorMsg =
'Source or destination processor does not support streaming (missing getReadableStream or getWritableStream).';
this.logger.error(`[${internalJobId}] ${errorMsg}`);
this.emitJobStatus(internalJobId, { status: BackupJobStatus.FAILED, error: errorMsg });
// Call handleJobCompletion before throwing
await this.backupConfigService.handleJobCompletion(jobConfig.id, BackupJobStatus.FAILED, internalJobId);
throw new Error(errorMsg);
}
let sourceStream: Readable | null = null;
let destinationStreamHandle: StreamingDestinationHandle | null = null;
const processorOptions: BackupDestinationProcessorOptions = {
jobId: internalJobId,
onProgress: (progress: number) => {
this.logger.log(`[${internalJobId}] Destination progress: ${progress}%`);
this.emitJobStatus(internalJobId, { progress: Math.min(progress, 99) });
},
onOutput: (data: string) => {
this.logger.debug(`[${internalJobId} Dest. Processor Output]: ${data}`);
},
onError: (errorMsg: string) => {
this.logger.warn(`[${internalJobId} Dest. Processor Error]: ${errorMsg}`);
},
};
try {
this.logger.debug(`[${internalJobId}] Preparing source stream...`);
sourceStream = await sourceProcessor.getReadableStream(jobConfig.sourceConfig);
this.logger.debug(
`[${internalJobId}] Source stream prepared. Preparing destination stream...`
);
destinationStreamHandle = await destinationProcessor.getWritableStream(
jobConfig.destinationConfig,
internalJobId,
processorOptions
);
this.logger.debug(`[${internalJobId}] Destination stream prepared. Starting stream pipe.`);
if (!sourceStream || !destinationStreamHandle?.stream) {
throw new Error('Failed to initialize source or destination stream.');
}
let totalBytesProcessed = 0;
sourceStream.on('data', (chunk) => {
totalBytesProcessed += chunk.length;
this.logger.verbose(
`[${internalJobId}] Stream data: ${chunk.length} bytes, Total: ${totalBytesProcessed}`
);
});
await pipeline(sourceStream, destinationStreamHandle.stream);
this.logger.log(
`[${internalJobId}] Stream piping completed. Waiting for destination processor to finalize...`
);
const destinationResult = await destinationStreamHandle.completionPromise;
if (!destinationResult.success) {
const errorMsg =
destinationResult.error || 'Destination processor failed after streaming.';
this.logger.error(`[${internalJobId}] ${errorMsg}`);
this.emitJobStatus(internalJobId, { status: BackupJobStatus.FAILED, error: errorMsg });
// Call handleJobCompletion before throwing
await this.backupConfigService.handleJobCompletion(
jobConfig.id,
BackupJobStatus.FAILED,
internalJobId
);
throw new Error(errorMsg);
}
this.logger.log(
`Streaming backup job ${jobConfig.name} (Internal ID: ${internalJobId}) completed successfully.`
);
this.emitJobStatus(internalJobId, {
status: BackupJobStatus.COMPLETED,
progress: 100,
message: 'Backup completed successfully.',
});
// Call handleJobCompletion on success
await this.backupConfigService.handleJobCompletion(
jobConfig.id,
BackupJobStatus.COMPLETED,
internalJobId
);
if (sourceProcessor.cleanup) {
this.logger.debug(`[${internalJobId}] Performing post-success cleanup for source...`);
await sourceProcessor.cleanup({
success: true,
outputPath: 'streamed',
cleanupRequired: false,
} as any);
}
if (destinationProcessor.cleanup) {
this.logger.debug(
`[${internalJobId}] Performing post-success cleanup for destination...`
);
await destinationProcessor.cleanup({ success: true, cleanupRequired: false });
}
} catch (e) {
const error = e as Error;
this.logger.error(
`Streaming backup job ${jobConfig.name} (Internal ID: ${internalJobId}) failed: ${error.message}`,
error.stack
);
this.emitJobStatus(internalJobId, {
status: BackupJobStatus.FAILED,
error: error.message,
message: 'Backup failed during streaming execution.',
});
// Call handleJobCompletion on failure
await this.backupConfigService.handleJobCompletion(
jobConfig.id,
BackupJobStatus.FAILED,
internalJobId
);
this.logger.error(
`[${internalJobId}] Performing cleanup due to failure for job ${jobConfig.name}...`
);
try {
if (sourceProcessor.cleanup) {
this.logger.debug(`[${internalJobId}] Cleaning up source processor...`);
await sourceProcessor.cleanup({
success: false,
error: error.message,
cleanupRequired: true,
} as any);
}
} catch (cleanupError) {
this.logger.error(
`[${internalJobId}] Error during source processor cleanup: ${(cleanupError as Error).message}`,
(cleanupError as Error).stack
);
}
try {
if (destinationProcessor.cleanup) {
this.logger.debug(`[${internalJobId}] Cleaning up destination processor...`);
const destCleanupError =
(
destinationStreamHandle?.completionPromise &&
((await destinationStreamHandle.completionPromise.catch(
(er) => er
)) as BackupDestinationResult)
)?.error || error.message;
await destinationProcessor.cleanup({
success: false,
error: destCleanupError,
cleanupRequired: true,
});
}
} catch (cleanupError) {
this.logger.error(
`[${internalJobId}] Error during destination processor cleanup: ${(cleanupError as Error).message}`,
(cleanupError as Error).stack
);
}
throw error;
}
}
private async executeRegularBackup(
sourceProcessor: BackupSourceProcessor<any>,
destinationProcessor: BackupDestinationProcessor<any>,
jobConfig: BackupJobConfig, // This is the config object, not its ID
internalJobId: string,
configId: string // Pass the configId for handleJobCompletion
): Promise<void> {
this.logger.log(
`Executing REGULAR backup for job: ${jobConfig.name} (Config ID: ${configId}, Internal Job ID: ${internalJobId})`
);
this.emitJobStatus(internalJobId, {
status: BackupJobStatus.RUNNING,
progress: 0,
message: 'Starting regular backup...',
});
let sourceResult: BackupSourceResult | null = null;
let destinationResult: BackupDestinationResult | null = null;
const processorOptions: BackupSourceProcessorOptions & BackupDestinationProcessorOptions = {
jobId: internalJobId,
onProgress: (progressUpdate) => {
const numericProgress =
typeof progressUpdate === 'number'
? progressUpdate
: (progressUpdate as any).progress;
if (typeof numericProgress === 'number') {
this.emitJobStatus(internalJobId, { progress: numericProgress });
}
},
onOutput: (data: string) => {
this.logger.debug(`[${internalJobId} Processor Output]: ${data}`);
},
onError: (errorMsg: string) => {
this.logger.warn(`[${internalJobId} Processor Error]: ${errorMsg}`);
},
};
try {
this.logger.debug(`[${internalJobId}] Executing source processor...`);
sourceResult = await sourceProcessor.execute(jobConfig.sourceConfig, processorOptions);
this.logger.debug(
`[${internalJobId}] Source processor execution completed. Success: ${sourceResult.success}, OutputPath: ${sourceResult.outputPath}`
);
if (!sourceResult.success || !sourceResult.outputPath) {
const errorMsg =
sourceResult.error || 'Source processor failed to produce an output path.';
this.logger.error(`[${internalJobId}] Source processor failed: ${errorMsg}`);
this.emitJobStatus(internalJobId, {
status: BackupJobStatus.FAILED,
error: errorMsg,
message: 'Source processing failed.',
});
this.jobTrackingService.updateJobStatus(internalJobId, {
status: BackupJobStatus.FAILED,
error: errorMsg,
});
// Call handleJobCompletion before throwing
await this.backupConfigService.handleJobCompletion(
configId,
BackupJobStatus.FAILED,
internalJobId
);
throw new Error(errorMsg);
}
this.emitJobStatus(internalJobId, {
progress: 50,
message: 'Source processing complete. Starting destination processing.',
});
this.logger.debug(
`[${internalJobId}] Executing destination processor with source output: ${sourceResult.outputPath}...`
);
destinationResult = await destinationProcessor.execute(
sourceResult.outputPath,
jobConfig.destinationConfig,
processorOptions
);
this.logger.debug(
`[${internalJobId}] Destination processor execution completed. Success: ${destinationResult.success}`
);
if (!destinationResult.success) {
const errorMsg = destinationResult.error || 'Destination processor failed.';
this.logger.error(`[${internalJobId}] Destination processor failed: ${errorMsg}`);
this.emitJobStatus(internalJobId, {
status: BackupJobStatus.FAILED,
error: errorMsg,
message: 'Destination processing failed.',
});
this.jobTrackingService.updateJobStatus(internalJobId, {
status: BackupJobStatus.FAILED,
error: errorMsg,
});
// Call handleJobCompletion before throwing
await this.backupConfigService.handleJobCompletion(
configId,
BackupJobStatus.FAILED,
internalJobId
);
throw new Error(errorMsg);
}
this.logger.log(
`Regular backup job ${jobConfig.name} (Internal ID: ${internalJobId}) completed successfully.`
);
this.emitJobStatus(internalJobId, {
status: BackupJobStatus.COMPLETED,
progress: 100,
message: 'Backup completed successfully.',
});
// Call handleJobCompletion on success
await this.backupConfigService.handleJobCompletion(
configId,
BackupJobStatus.COMPLETED,
internalJobId
);
if (sourceResult && sourceProcessor.cleanup) {
this.logger.debug(
`[${internalJobId}] Performing post-success cleanup for source processor...`
);
await sourceProcessor.cleanup(sourceResult);
}
if (destinationResult && destinationProcessor.cleanup) {
this.logger.debug(
`[${internalJobId}] Performing post-success cleanup for destination processor...`
);
await destinationProcessor.cleanup(destinationResult);
}
} catch (e) {
const error = e as Error;
this.logger.error(
`Regular backup job ${jobConfig.name} (Internal ID: ${internalJobId}) failed: ${error.message}`,
error.stack
);
this.emitJobStatus(internalJobId, {
status: BackupJobStatus.FAILED,
error: error.message,
message: 'Backup failed during regular execution.',
});
this.jobTrackingService.updateJobStatus(internalJobId, {
status: BackupJobStatus.FAILED,
error: error.message,
});
// Call handleJobCompletion on failure
await this.backupConfigService.handleJobCompletion(
configId,
BackupJobStatus.FAILED,
internalJobId
);
this.logger.error(
`[${internalJobId}] Performing cleanup due to failure for job ${jobConfig.name}...`
);
if (sourceResult && sourceProcessor.cleanup) {
try {
this.logger.debug(
`[${internalJobId}] Cleaning up source processor after failure...`
);
await sourceProcessor.cleanup({
...sourceResult,
success: false,
error: sourceResult.error || error.message,
});
} catch (cleanupError) {
this.logger.error(
`[${internalJobId}] Error during source processor cleanup: ${(cleanupError as Error).message}`,
(cleanupError as Error).stack
);
}
}
if (destinationResult && destinationProcessor.cleanup) {
try {
this.logger.debug(
`[${internalJobId}] Cleaning up destination processor after failure...`
);
await destinationProcessor.cleanup({
...destinationResult,
success: false,
error: destinationResult.error || error.message,
});
} catch (cleanupError) {
this.logger.error(
`[${internalJobId}] Error during destination processor cleanup: ${(cleanupError as Error).message}`,
(cleanupError as Error).stack
);
}
} else if (sourceResult?.success && destinationProcessor.cleanup) {
try {
this.logger.debug(
`[${internalJobId}] Cleaning up destination processor after a failure (destinationResult not available)...`
);
await destinationProcessor.cleanup({
success: false,
error: error.message,
cleanupRequired: true,
});
} catch (cleanupError) {
this.logger.error(
`[${internalJobId}] Error during destination processor cleanup (no result): ${(cleanupError as Error).message}`,
(cleanupError as Error).stack
);
}
}
throw error;
}
}
private emitJobStatus(
internalJobId: string,
statusUpdate: {
status?: BackupJobStatus;
progress?: number;
message?: string;
error?: string;
}
): void {
this.logger.log(
`[Job Status Update - ${internalJobId}]: Status: ${statusUpdate.status}, Progress: ${statusUpdate.progress}, Msg: ${statusUpdate.message}, Err: ${statusUpdate.error}`
);
const updatePayload: Partial<Omit<JobStatus, 'externalJobId' | 'startTime' | 'name' | 'id'>> = {
...statusUpdate,
};
this.jobTrackingService.updateJobStatus(internalJobId, updatePayload);
}
}
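
Note: a hedged sketch of the expected call site (the import path for BackupOrchestrationService is assumed from the module layout; it is not shown in this diff). The config's own ID doubles as the tracking "external" ID, and the return value is the per-run internal job ID.

import { BackupJobConfig } from '@app/unraid-api/graph/resolvers/backup/backup.model.js';
import { BackupOrchestrationService } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-orchestration.service.js'; // path assumed

async function runConfiguredJob(
    orchestration: BackupOrchestrationService,
    jobConfig: BackupJobConfig
): Promise<string> {
    // Final status is persisted via backupConfigService.handleJobCompletion inside
    // executeBackupJob; the caller only needs the run ID for status queries.
    return orchestration.executeBackupJob(jobConfig, jobConfig.id);
}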

View File

@@ -0,0 +1,503 @@
import type { LabelElement, SchemaBasedCondition } from '@jsonforms/core';
import { JsonSchema7, RuleEffect } from '@jsonforms/core';
import type { SettingSlice, UIElement } from '@app/unraid-api/types/json-forms.js';
import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
import { createLabeledControl } from '@app/unraid-api/graph/utils/form-utils.js';
export function getSourceConfigSlice(): SettingSlice {
const sourceConfigElements: UIElement[] = [
{
type: 'Control',
scope: '#/properties/sourceConfig/properties/type',
options: {
format: 'radio',
radioLayout: 'horizontal',
options: [
{
label: 'ZFS Snapshot',
value: SourceType.ZFS,
description: 'Create ZFS snapshot and backup',
},
{
label: 'Flash Drive',
value: SourceType.FLASH,
description: 'Backup flash drive contents',
},
{
label: 'Custom Script',
value: SourceType.SCRIPT,
description: 'Run custom script to generate backup data',
},
{
label: 'Raw Files',
value: SourceType.RAW,
description: 'Direct file backup without preprocessing',
},
],
},
},
createLabeledControl({
scope: '#/properties/sourceConfig/properties/timeout',
label: 'Timeout',
description: 'Timeout in seconds for backup operation',
controlOptions: {
placeholder: '3600',
format: 'number',
},
}),
createLabeledControl({
scope: '#/properties/sourceConfig/properties/cleanupOnFailure',
label: 'Cleanup on Failure',
description: 'Clean up backup artifacts on failure',
controlOptions: {
format: 'toggle',
},
}),
// Raw Backup Configuration
{
type: 'VerticalLayout',
rule: {
effect: RuleEffect.SHOW,
condition: {
scope: '#/properties/sourceConfig/properties/type',
schema: { const: SourceType.RAW },
} as SchemaBasedCondition,
},
elements: [
{
type: 'Label',
text: 'Raw Backup Configuration',
options: {
description: 'Configure direct file/folder backup settings.',
},
} as LabelElement,
createLabeledControl({
scope: '#/properties/sourceConfig/properties/rawConfig/properties/sourcePath',
label: 'Source Path',
description: 'Source path to backup',
controlOptions: {
placeholder: '/mnt/user/data',
},
}),
createLabeledControl({
scope: '#/properties/sourceConfig/properties/rawConfig/properties/excludePatterns',
label: 'Exclude Patterns',
description: 'Patterns to exclude from backup',
controlOptions: {
placeholder: '*.tmp,*.log',
},
}),
createLabeledControl({
scope: '#/properties/sourceConfig/properties/rawConfig/properties/includePatterns',
label: 'Include Patterns',
description: 'Patterns to include in backup',
controlOptions: {
placeholder: '*.txt,*.doc',
},
}),
],
},
// ZFS Configuration
{
type: 'VerticalLayout',
rule: {
effect: RuleEffect.SHOW,
condition: {
scope: '#/properties/sourceConfig/properties/type',
schema: { const: SourceType.ZFS },
} as SchemaBasedCondition,
},
elements: [
{
type: 'Label',
text: 'ZFS Configuration',
options: {
description: 'Configure ZFS snapshot settings for backup.',
},
} as LabelElement,
createLabeledControl({
scope: '#/properties/sourceConfig/properties/zfsConfig/properties/poolName',
label: 'Pool Name',
description: 'ZFS pool name',
controlOptions: {
placeholder: 'tank',
},
}),
createLabeledControl({
scope: '#/properties/sourceConfig/properties/zfsConfig/properties/datasetName',
label: 'Dataset Name',
description: 'ZFS dataset name',
controlOptions: {
placeholder: 'data',
},
}),
createLabeledControl({
scope: '#/properties/sourceConfig/properties/zfsConfig/properties/snapshotPrefix',
label: 'Snapshot Prefix',
description: 'Prefix for snapshot names',
controlOptions: {
placeholder: 'backup',
},
}),
createLabeledControl({
scope: '#/properties/sourceConfig/properties/zfsConfig/properties/cleanupSnapshots',
label: 'Cleanup Snapshots',
description: 'Clean up snapshots after backup',
controlOptions: {
format: 'checkbox',
},
}),
createLabeledControl({
scope: '#/properties/sourceConfig/properties/zfsConfig/properties/retainSnapshots',
label: 'Retain Snapshots',
description: 'Number of snapshots to retain',
controlOptions: {
placeholder: '5',
format: 'number',
},
}),
],
},
// Flash Configuration
{
type: 'VerticalLayout',
rule: {
effect: RuleEffect.SHOW,
condition: {
scope: '#/properties/sourceConfig/properties/type',
schema: { const: SourceType.FLASH },
} as SchemaBasedCondition,
},
elements: [
{
type: 'Label',
text: 'Flash Backup Configuration',
options: {
description: 'Configure Unraid flash drive backup settings.',
},
} as LabelElement,
createLabeledControl({
scope: '#/properties/sourceConfig/properties/flashConfig/properties/flashPath',
label: 'Flash Path',
description: 'Path to flash drive',
controlOptions: {
placeholder: '/boot',
},
}),
createLabeledControl({
scope: '#/properties/sourceConfig/properties/flashConfig/properties/includeGitHistory',
label: 'Include Git History',
description: 'Include git history in backup',
controlOptions: {
format: 'checkbox',
},
}),
createLabeledControl({
scope: '#/properties/sourceConfig/properties/flashConfig/properties/additionalPaths',
label: 'Additional Paths',
description: 'Additional paths to include',
controlOptions: {
placeholder: '/etc/config',
},
}),
],
},
// Script Configuration
{
type: 'VerticalLayout',
rule: {
effect: RuleEffect.SHOW,
condition: {
scope: '#/properties/sourceConfig/properties/type',
schema: { const: SourceType.SCRIPT },
} as SchemaBasedCondition,
},
elements: [
{
type: 'Label',
text: 'Custom Script Configuration',
options: {
description: 'Configure custom script execution settings.',
},
} as LabelElement,
createLabeledControl({
scope: '#/properties/sourceConfig/properties/scriptConfig/properties/scriptPath',
label: 'Script Path',
description: 'Path to script file',
controlOptions: {
placeholder: '/usr/local/bin/backup.sh',
},
}),
createLabeledControl({
scope: '#/properties/sourceConfig/properties/scriptConfig/properties/scriptArgs',
label: 'Script Arguments',
description: 'Arguments for script',
controlOptions: {
placeholder: '--verbose --compress',
},
}),
createLabeledControl({
scope: '#/properties/sourceConfig/properties/scriptConfig/properties/workingDirectory',
label: 'Working Directory',
description: 'Working directory for script',
controlOptions: {
placeholder: '/tmp',
},
}),
createLabeledControl({
scope: '#/properties/sourceConfig/properties/scriptConfig/properties/outputPath',
label: 'Output Path',
description: 'Path for script output',
controlOptions: {
placeholder: '/tmp/backup.tar.gz',
},
}),
],
},
];
const sourceConfigProperties: Record<string, JsonSchema7> = {
sourceConfig: {
type: 'object',
title: 'Source Configuration',
description: 'Configuration for backup source',
properties: {
type: {
type: 'string',
title: 'Backup Type',
description: 'Type of backup to perform',
enum: [SourceType.ZFS, SourceType.FLASH, SourceType.SCRIPT, SourceType.RAW],
default: SourceType.ZFS,
},
timeout: {
type: 'integer',
title: 'Timeout',
description: 'Timeout in seconds for backup operation',
minimum: 30,
maximum: 86400,
default: 3600,
},
cleanupOnFailure: {
type: 'boolean',
title: 'Cleanup on Failure',
description: 'Clean up backup artifacts on failure',
default: true,
},
rawConfig: {
type: 'object',
title: 'Raw Backup Configuration',
properties: {
sourcePath: {
type: 'string',
title: 'Source Path',
description: 'Source path to backup',
minLength: 1,
},
excludePatterns: {
type: 'array',
title: 'Exclude Patterns',
description: 'Patterns to exclude from backup',
items: {
type: 'string',
},
default: [],
},
includePatterns: {
type: 'array',
title: 'Include Patterns',
description: 'Patterns to include in backup',
items: {
type: 'string',
},
default: [],
},
},
required: ['sourcePath'],
},
zfsConfig: {
type: 'object',
title: 'ZFS Configuration',
properties: {
poolName: {
type: 'string',
title: 'Pool Name',
description: 'ZFS pool name',
minLength: 1,
},
datasetName: {
type: 'string',
title: 'Dataset Name',
description: 'ZFS dataset name',
minLength: 1,
},
snapshotPrefix: {
type: 'string',
title: 'Snapshot Prefix',
description: 'Prefix for snapshot names',
default: 'backup',
},
cleanupSnapshots: {
type: 'boolean',
title: 'Cleanup Snapshots',
description: 'Clean up snapshots after backup',
default: true,
},
retainSnapshots: {
type: 'integer',
title: 'Retain Snapshots',
description: 'Number of snapshots to retain',
minimum: 0,
default: 5,
},
},
required: ['poolName', 'datasetName'],
},
flashConfig: {
type: 'object',
title: 'Flash Configuration',
properties: {
flashPath: {
type: 'string',
title: 'Flash Path',
description: 'Path to flash drive',
default: '/boot',
},
includeGitHistory: {
type: 'boolean',
title: 'Include Git History',
description: 'Include git history in backup',
default: true,
},
additionalPaths: {
type: 'array',
title: 'Additional Paths',
description: 'Additional paths to include',
items: {
type: 'string',
},
default: [],
},
},
},
scriptConfig: {
type: 'object',
title: 'Script Configuration',
properties: {
scriptPath: {
type: 'string',
title: 'Script Path',
description: 'Path to script file',
minLength: 1,
},
scriptArgs: {
type: 'array',
title: 'Script Arguments',
description: 'Arguments for script',
items: {
type: 'string',
},
default: [],
},
workingDirectory: {
type: 'string',
title: 'Working Directory',
description: 'Working directory for script',
default: '/tmp',
},
outputPath: {
type: 'string',
title: 'Output Path',
description: 'Path for script output',
minLength: 1,
},
},
required: ['scriptPath', 'outputPath'],
},
},
required: ['type'],
},
};
// Apply conditional logic for sourceConfig
if (sourceConfigProperties.sourceConfig && typeof sourceConfigProperties.sourceConfig === 'object') {
sourceConfigProperties.sourceConfig.allOf = [
{
if: { properties: { type: { const: SourceType.RAW } }, required: ['type'] },
then: {
required: ['rawConfig'],
properties: {
zfsConfig: { not: {} },
flashConfig: { not: {} },
scriptConfig: { not: {} },
},
},
},
{
if: { properties: { type: { const: SourceType.ZFS } }, required: ['type'] },
then: {
required: ['zfsConfig'],
properties: {
rawConfig: { not: {} },
flashConfig: { not: {} },
scriptConfig: { not: {} },
},
},
},
{
if: { properties: { type: { const: SourceType.FLASH } }, required: ['type'] },
then: {
required: ['flashConfig'],
properties: {
rawConfig: { not: {} },
zfsConfig: { not: {} },
scriptConfig: { not: {} },
},
},
},
{
if: { properties: { type: { const: SourceType.SCRIPT } }, required: ['type'] },
then: {
required: ['scriptConfig'],
properties: {
rawConfig: { not: {} },
zfsConfig: { not: {} },
flashConfig: { not: {} },
},
},
},
];
}
const verticalLayoutElement: UIElement = {
type: 'VerticalLayout',
elements: sourceConfigElements,
options: { step: 1 },
};
return {
properties: sourceConfigProperties,
elements: [verticalLayoutElement],
};
}
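
Note: a short sketch of consuming the slice, assuming the SettingSlice shape (JSON schema properties plus JSON Forms UI elements) is merged into a parent schema/uischema pair.

const { properties, elements } = getSourceConfigSlice();

const schema = { type: 'object', properties }; // feeds the JSON Forms `schema` prop
const uischema = { type: 'VerticalLayout', elements }; // feeds the `uischema` prop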

View File

@@ -0,0 +1,53 @@
import { Readable } from 'stream';
import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
export interface BackupSourceConfig {
timeout: number;
cleanupOnFailure: boolean;
}
export interface BackupSourceResult {
success: boolean;
outputPath?: string;
streamPath?: string;
snapshotName?: string;
error?: string;
cleanupRequired?: boolean;
metadata?: Record<string, unknown>;
// Streaming support
streamCommand?: string;
streamArgs?: string[];
supportsStreaming?: boolean;
isStreamingMode?: boolean;
}
export interface BackupSourceProcessorOptions {
jobId?: string;
onProgress?: (progress: number) => void;
onOutput?: (data: string) => void;
onError?: (error: string) => void;
useStreaming?: boolean;
}
export abstract class BackupSourceProcessor<TConfig extends BackupSourceConfig> {
abstract readonly sourceType: SourceType;
abstract execute(
config: TConfig,
options?: BackupSourceProcessorOptions
): Promise<BackupSourceResult>;
abstract validate(config: TConfig): Promise<{ valid: boolean; error?: string; warnings?: string[] }>;
abstract cleanup(result: BackupSourceResult): Promise<void>;
// Getter to check if processor supports streaming
abstract get supportsStreaming(): boolean;
// Optional getter to get a readable stream for streaming backups
get getReadableStream(): ((config: TConfig) => Promise<Readable>) | undefined {
return undefined;
}
}
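
Note: a minimal sketch of what a concrete (non-streaming) implementation must provide; NoopSourceProcessor and its config are hypothetical and exist only to show the contract.

import {
    BackupSourceConfig,
    BackupSourceProcessor,
    BackupSourceProcessorOptions,
    BackupSourceResult,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source-processor.interface.js';
import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';

interface NoopSourceConfig extends BackupSourceConfig {
    label: string;
}

class NoopSourceProcessor extends BackupSourceProcessor<NoopSourceConfig> {
    readonly sourceType = SourceType.RAW; // reusing an existing enum member for the sketch

    get supportsStreaming(): boolean {
        return false; // regular (execute-based) path only; getReadableStream stays undefined
    }

    async execute(
        config: NoopSourceConfig,
        options?: BackupSourceProcessorOptions
    ): Promise<BackupSourceResult> {
        options?.onProgress?.(100);
        return { success: true, outputPath: '/tmp/noop', metadata: { label: config.label } };
    }

    async validate(config: NoopSourceConfig) {
        return { valid: config.label.length > 0 };
    }

    async cleanup(_result: BackupSourceResult): Promise<void> {
        // nothing staged, nothing to clean
    }
}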

View File

@@ -0,0 +1,30 @@
import { Module } from '@nestjs/common';
import { BackupSourceService } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.service.js';
import { FlashSourceProcessor } from '@app/unraid-api/graph/resolvers/backup/source/flash/flash-source-processor.service.js';
import { FlashValidationService } from '@app/unraid-api/graph/resolvers/backup/source/flash/flash-validation.service.js';
import { RawSourceProcessor } from '@app/unraid-api/graph/resolvers/backup/source/raw/raw-source-processor.service.js';
import { ScriptSourceProcessor } from '@app/unraid-api/graph/resolvers/backup/source/script/script-source-processor.service.js';
import { ZfsSourceProcessor } from '@app/unraid-api/graph/resolvers/backup/source/zfs/zfs-source-processor.service.js';
import { ZfsValidationService } from '@app/unraid-api/graph/resolvers/backup/source/zfs/zfs-validation.service.js';
@Module({
providers: [
BackupSourceService,
FlashSourceProcessor,
FlashValidationService,
RawSourceProcessor,
ScriptSourceProcessor,
ZfsSourceProcessor,
ZfsValidationService,
],
exports: [
BackupSourceService,
FlashSourceProcessor,
RawSourceProcessor,
ScriptSourceProcessor,
ZfsSourceProcessor,
ZfsValidationService,
],
})
export class BackupSourceModule {}

View File

@@ -0,0 +1,99 @@
import { BadRequestException, Injectable, Logger } from '@nestjs/common';
import { EventEmitter } from 'events';
import { v4 as uuidv4 } from 'uuid';
import {
BackupSourceConfig,
BackupSourceProcessor,
BackupSourceProcessorOptions,
BackupSourceResult,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source-processor.interface.js';
import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
import {
FlashSourceConfig,
FlashSourceProcessor,
} from '@app/unraid-api/graph/resolvers/backup/source/flash/flash-source-processor.service.js';
import {
RawSourceConfig,
RawSourceProcessor,
} from '@app/unraid-api/graph/resolvers/backup/source/raw/raw-source-processor.service.js';
import {
ScriptSourceConfig,
ScriptSourceProcessor,
} from '@app/unraid-api/graph/resolvers/backup/source/script/script-source-processor.service.js';
import {
ZfsSourceConfig,
ZfsSourceProcessor,
} from '@app/unraid-api/graph/resolvers/backup/source/zfs/zfs-source-processor.service.js';
export interface BackupSourceOptions {
jobId?: string;
onProgress?: (progress: number) => void;
onOutput?: (data: string) => void;
onError?: (error: string) => void;
}
@Injectable()
export class BackupSourceService extends EventEmitter {
private readonly logger = new Logger(BackupSourceService.name);
constructor(
private readonly flashSourceProcessor: FlashSourceProcessor,
private readonly rawSourceProcessor: RawSourceProcessor,
private readonly scriptSourceProcessor: ScriptSourceProcessor,
private readonly zfsSourceProcessor: ZfsSourceProcessor
) {
super();
}
async processSource<T extends BackupSourceConfig & { type: SourceType }>(
config: T,
options?: BackupSourceOptions
): Promise<BackupSourceResult> {
const processor = this.getProcessor(config.type);
if (!processor) {
throw new BadRequestException(`Unsupported source type: ${config.type}`);
}
const processorOptions: BackupSourceProcessorOptions = {
jobId: options?.jobId || uuidv4(),
onProgress: options?.onProgress,
onOutput: options?.onOutput,
onError: options?.onError,
};
try {
const result = await processor.execute(config, processorOptions);
this.logger.log(`Source processing completed for type: ${config.type}`);
return result;
} catch (error) {
this.logger.error(`Source processing failed for type: ${config.type}`, error);
throw error;
}
}
async cancelSourceJob(jobId: string): Promise<boolean> {
this.logger.log(`Attempting to cancel source job: ${jobId}`);
return false;
}
async cleanup(): Promise<void> {
this.logger.log('Cleaning up source service...');
}
public getProcessor(type: SourceType): BackupSourceProcessor<any> | null {
switch (type) {
case SourceType.FLASH:
return this.flashSourceProcessor;
case SourceType.RAW:
return this.rawSourceProcessor;
case SourceType.SCRIPT:
return this.scriptSourceProcessor;
case SourceType.ZFS:
return this.zfsSourceProcessor;
default:
return null;
}
}
}
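
Note: a sketch of invoking the service for a RAW source from an async context; `sourceService` is assumed to be injected, and the config shape mirrors RawSourceConfig plus the `type` discriminator.

const result = await sourceService.processSource(
    {
        type: SourceType.RAW,
        sourcePath: '/mnt/user/appdata',
        timeout: 3600,
        cleanupOnFailure: true,
    },
    { onProgress: (p) => console.log(`source progress: ${p}%`) }
);
if (!result.success) {
    throw new Error(result.error ?? 'Source processing failed');
}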

View File

@@ -0,0 +1,136 @@
import { createUnionType, Field, InputType, ObjectType, registerEnumType } from '@nestjs/graphql';
import { Type } from 'class-transformer';
import { IsBoolean, IsEnum, IsNumber, IsOptional, Min, ValidateNested } from 'class-validator';
import {
FlashPreprocessConfig,
FlashPreprocessConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/source/flash/flash-source.types.js';
import {
RawBackupConfig,
RawBackupConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/source/raw/raw-source.types.js';
import {
ScriptPreprocessConfig,
ScriptPreprocessConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/source/script/script-source.types.js';
import {
ZfsPreprocessConfig,
ZfsPreprocessConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/source/zfs/zfs-source.types.js';
export enum SourceType {
ZFS = 'ZFS',
FLASH = 'FLASH',
SCRIPT = 'SCRIPT',
RAW = 'RAW',
}
registerEnumType(SourceType, {
name: 'SourceType',
description:
'Type of backup to perform (ZFS snapshot, Flash backup, Custom script, or Raw file backup)',
});
export { ZfsPreprocessConfigInput, ZfsPreprocessConfig };
export { FlashPreprocessConfigInput, FlashPreprocessConfig };
export { ScriptPreprocessConfigInput, ScriptPreprocessConfig };
export { RawBackupConfigInput, RawBackupConfig };
@InputType()
export class SourceConfigInput {
@Field(() => SourceType, { nullable: false })
@IsEnum(SourceType, { message: 'Invalid source type' })
type!: SourceType;
@Field(() => Number, { description: 'Timeout for backup operation in seconds', defaultValue: 3600 })
@IsOptional()
@IsNumber()
@Min(1)
timeout?: number;
@Field(() => Boolean, { description: 'Whether to cleanup on failure', defaultValue: true })
@IsOptional()
@IsBoolean()
cleanupOnFailure?: boolean;
@Field(() => ZfsPreprocessConfigInput, { nullable: true })
@IsOptional()
@ValidateNested()
@Type(() => ZfsPreprocessConfigInput)
zfsConfig?: ZfsPreprocessConfigInput;
@Field(() => FlashPreprocessConfigInput, { nullable: true })
@IsOptional()
@ValidateNested()
@Type(() => FlashPreprocessConfigInput)
flashConfig?: FlashPreprocessConfigInput;
@Field(() => ScriptPreprocessConfigInput, { nullable: true })
@IsOptional()
@ValidateNested()
@Type(() => ScriptPreprocessConfigInput)
scriptConfig?: ScriptPreprocessConfigInput;
@Field(() => RawBackupConfigInput, { nullable: true })
@IsOptional()
@ValidateNested()
@Type(() => RawBackupConfigInput)
rawConfig?: RawBackupConfigInput;
}
@ObjectType()
export class SourceConfig {
@Field(() => Number)
timeout!: number;
@Field(() => Boolean)
cleanupOnFailure!: boolean;
@Field(() => ZfsPreprocessConfig, { nullable: true })
zfsConfig?: ZfsPreprocessConfig;
@Field(() => FlashPreprocessConfig, { nullable: true })
flashConfig?: FlashPreprocessConfig;
@Field(() => ScriptPreprocessConfig, { nullable: true })
scriptConfig?: ScriptPreprocessConfig;
@Field(() => RawBackupConfig, { nullable: true })
rawConfig?: RawBackupConfig;
}
export const SourceConfigUnion = createUnionType({
name: 'SourceConfigUnion',
types: () =>
[ZfsPreprocessConfig, FlashPreprocessConfig, ScriptPreprocessConfig, RawBackupConfig] as const,
resolveType(obj: any, context, info) {
if (ZfsPreprocessConfig.isTypeOf && ZfsPreprocessConfig.isTypeOf(obj)) {
return ZfsPreprocessConfig;
}
if (FlashPreprocessConfig.isTypeOf && FlashPreprocessConfig.isTypeOf(obj)) {
return FlashPreprocessConfig;
}
if (ScriptPreprocessConfig.isTypeOf && ScriptPreprocessConfig.isTypeOf(obj)) {
return ScriptPreprocessConfig;
}
if (RawBackupConfig.isTypeOf && RawBackupConfig.isTypeOf(obj)) {
return RawBackupConfig;
}
console.error(`[SourceConfigUnion] Could not resolve type for object: ${JSON.stringify(obj)}`);
return null;
},
});
export const SourceConfigInputUnion = SourceConfigInput;
export interface PreprocessResult {
success: boolean;
streamPath?: string;
outputPath?: string;
snapshotName?: string;
error?: string;
cleanupRequired?: boolean;
metadata?: Record<string, unknown>;
}
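
Note: a sketch of how a plain resolver payload might be rehydrated into SourceConfigInput; plainToInstance is the class-transformer counterpart to the @Type decorators above, and the zfsConfig field names are taken from the JSON-forms schema elsewhere in this diff.

import { plainToInstance } from 'class-transformer';

const input = plainToInstance(SourceConfigInput, {
    type: SourceType.ZFS,
    timeout: 3600,
    cleanupOnFailure: true,
    zfsConfig: { poolName: 'tank', datasetName: 'appdata', snapshotPrefix: 'backup' },
});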

View File

@@ -0,0 +1,18 @@
import { Field, InputType } from '@nestjs/graphql';
import { IsOptional, IsString } from 'class-validator';
@InputType()
export abstract class BaseSourceConfigInput {
@Field(() => String, {
description: 'Human-readable label for this source configuration',
nullable: true,
})
@IsOptional()
@IsString()
label?: string;
}
export interface BaseSourceConfig {
label: string;
}

View File

@@ -0,0 +1,307 @@
import { Injectable, Logger } from '@nestjs/common';
import { mkdir, writeFile } from 'fs/promises';
import { join } from 'path';
import { Readable } from 'stream';
import { execa } from 'execa';
import {
BackupSourceConfig,
BackupSourceProcessor,
BackupSourceProcessorOptions,
BackupSourceResult,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source-processor.interface.js';
import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
import { FlashPreprocessConfigInput } from '@app/unraid-api/graph/resolvers/backup/source/flash/flash-source.types.js';
import { FlashValidationService } from '@app/unraid-api/graph/resolvers/backup/source/flash/flash-validation.service.js';
export interface FlashSourceConfig extends BackupSourceConfig {
flashPath: string;
includeGitHistory: boolean;
additionalPaths?: string[];
}
@Injectable()
export class FlashSourceProcessor extends BackupSourceProcessor<FlashSourceConfig> {
readonly sourceType = SourceType.FLASH;
private readonly logger = new Logger(FlashSourceProcessor.name);
constructor(private readonly flashValidationService: FlashValidationService) {
super();
}
async execute(
config: FlashSourceConfig,
options?: BackupSourceProcessorOptions
): Promise<BackupSourceResult> {
const validation = await this.validate(config);
if (!validation.valid) {
return {
success: false,
error: `Flash configuration validation failed: ${validation.error}`,
metadata: { validationError: validation.error, validationWarnings: validation.warnings },
};
}
if (validation.warnings?.length) {
this.logger.warn(`Flash backup warnings: ${validation.warnings.join(', ')}`);
}
const tempGitPath = join(config.flashPath, '.git-backup-temp');
let gitRepoInitialized = false;
try {
if (config.includeGitHistory) {
gitRepoInitialized = await this.initializeGitRepository(config.flashPath, tempGitPath);
if (gitRepoInitialized) {
this.logger.log(`Initialized git repository for Flash backup at: ${tempGitPath}`);
}
}
// Generate streaming command for tar compression
const streamCommand = this.generateStreamCommand(config, gitRepoInitialized, tempGitPath);
return {
success: true,
outputPath: config.flashPath,
streamPath: config.flashPath,
metadata: {
flashPath: config.flashPath,
gitHistoryIncluded: config.includeGitHistory && gitRepoInitialized,
additionalPaths: config.additionalPaths,
validationWarnings: validation.warnings,
tempGitPath: gitRepoInitialized ? tempGitPath : undefined,
streamCommand: streamCommand.command,
streamArgs: streamCommand.args,
sourceType: this.sourceType,
},
cleanupRequired: gitRepoInitialized,
};
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(`Flash backup failed: ${errorMessage}`, error);
if (gitRepoInitialized) {
try {
await this.cleanupTempGitRepo(tempGitPath);
this.logger.log(`Cleaned up temporary git repository after failure: ${tempGitPath}`);
} catch (cleanupError) {
const cleanupErrorMessage =
cleanupError instanceof Error ? cleanupError.message : String(cleanupError);
this.logger.error(
`Failed to cleanup temporary git repository: ${cleanupErrorMessage}`
);
}
}
return {
success: false,
error: errorMessage,
cleanupRequired: gitRepoInitialized,
metadata: {
flashPath: config.flashPath,
gitRepoInitialized,
cleanupAttempted: gitRepoInitialized,
},
};
}
}
async validate(
config: FlashSourceConfig
): Promise<{ valid: boolean; error?: string; warnings?: string[] }> {
const legacyConfig: FlashPreprocessConfigInput = {
flashPath: config.flashPath,
includeGitHistory: config.includeGitHistory,
additionalPaths: config.additionalPaths,
};
const validationResult = await this.flashValidationService.validateFlashConfig(legacyConfig);
return {
valid: validationResult.isValid,
error: validationResult.errors.length > 0 ? validationResult.errors.join(', ') : undefined,
warnings: validationResult.warnings,
};
}
async cleanup(result: BackupSourceResult): Promise<void> {
if (result.cleanupRequired && result.metadata?.tempGitPath) {
await this.cleanupTempGitRepo(result.metadata.tempGitPath as string);
}
}
private async initializeGitRepository(flashPath: string, tempGitPath: string): Promise<boolean> {
try {
const existingGitPath = join(flashPath, '.git');
const hasExistingRepo = await this.flashValidationService.validateGitRepository(flashPath);
if (hasExistingRepo) {
await execa('cp', ['-r', existingGitPath, tempGitPath]);
this.logger.log('Copied existing git repository to temporary location');
return true;
}
await mkdir(tempGitPath, { recursive: true });
// Initialize the repository in flashPath itself (not tempGitPath) so the add/commit
// below, which run with cwd: flashPath, operate on it; the .git directory is moved
// out to tempGitPath afterwards.
await execa('git', ['init'], { cwd: flashPath });
// Write the .gitignore alongside the files being staged so the exclusions apply.
const gitignorePath = join(flashPath, '.gitignore');
const gitignoreContent = [
'# Exclude sensitive files',
'*.key',
'*.pem',
'*.p12',
'*.pfx',
'config/passwd',
'config/shadow',
'config/ssh/',
'config/ssl/',
'config/wireguard/',
'config/network.cfg',
'config/ident.cfg',
].join('\n');
await writeFile(gitignorePath, gitignoreContent);
await execa('git', ['add', '.'], { cwd: flashPath });
await execa(
'git',
[
'-c',
'user.name=Unraid Backup',
'-c',
'user.email=backup@unraid.net',
'commit',
'-m',
'Flash backup snapshot',
],
{ cwd: flashPath }
);
await execa('mv', [join(flashPath, '.git'), tempGitPath]);
this.logger.log('Initialized new git repository for Flash backup');
return true;
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.warn(`Failed to initialize git repository: ${errorMessage}`);
return false;
}
}
private async cleanupTempGitRepo(tempGitPath: string): Promise<void> {
try {
await execa('rm', ['-rf', tempGitPath]);
this.logger.log(`Cleaned up temporary git repository: ${tempGitPath}`);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(`Failed to cleanup temporary git repository: ${errorMessage}`);
}
}
private generateStreamCommand(
config: FlashSourceConfig,
gitRepoInitialized: boolean,
tempGitPath?: string
): { command: string; args: string[] } {
const excludeArgs: string[] = [];
// Standard exclusions for flash backups
const standardExcludes = ['lost+found', '*.tmp', '*.temp', '.DS_Store', 'Thumbs.db'];
standardExcludes.forEach((pattern) => {
excludeArgs.push('--exclude', pattern);
});
// If a temporary git repo was staged, exclude the staging directory itself from the archive
if (gitRepoInitialized && tempGitPath) {
excludeArgs.push('--exclude', '.git-backup-temp');
}
const tarArgs = [
'-czf', // create, gzip, file
'-', // output to stdout for streaming
'-C', // change to directory
config.flashPath,
...excludeArgs,
'.', // backup everything in the directory
];
// Add additional paths if specified
if (config.additionalPaths?.length) {
config.additionalPaths.forEach((path) => {
tarArgs.push('-C', path, '.');
});
}
return {
command: 'tar',
args: tarArgs,
};
}
get supportsStreaming(): boolean {
return true;
}
get getReadableStream(): (config: FlashSourceConfig) => Promise<Readable> {
return async (config: FlashSourceConfig): Promise<Readable> => {
const validation = await this.validate(config);
if (!validation.valid) {
const errorMsg = `Flash configuration validation failed: ${validation.error}`;
this.logger.error(errorMsg);
const errorStream = new Readable({
read() {
this.emit('error', new Error(errorMsg));
this.push(null);
},
});
return errorStream;
}
const { command, args } = this.generateStreamCommand(config, false);
this.logger.log(
`[getReadableStream] Streaming flash backup with command: ${command} ${args.join(' ')}`
);
try {
const tarProcess = execa(command, args, {
cwd: config.flashPath,
});
tarProcess.catch((error) => {
this.logger.error(
`Error executing tar command for streaming: ${error.message}`,
error.stack
);
});
if (!tarProcess.stdout) {
throw new Error('Failed to get stdout stream from tar process.');
}
tarProcess.stdout.on('end', () => {
this.logger.log('[getReadableStream] Tar process stdout stream ended.');
});
tarProcess.stdout.on('error', (err) => {
this.logger.error(
`[getReadableStream] Tar process stdout stream error: ${err.message}`
);
});
return tarProcess.stdout;
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(`[getReadableStream] Failed to start tar process: ${errorMessage}`);
const errorStream = new Readable({
read() {
this.emit('error', new Error(errorMessage));
this.push(null);
},
});
return errorStream;
}
};
}
}
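
Note: a sketch of the streaming path, writing the flash archive straight to disk with no intermediate temp file; `flashProcessor` is assumed to come from DI and the destination path is illustrative.

import { createWriteStream } from 'fs';
import { pipeline } from 'stream/promises';

const readStream = await flashProcessor.getReadableStream({
    flashPath: '/boot',
    includeGitHistory: false,
    timeout: 3600,
    cleanupOnFailure: true,
});
await pipeline(readStream, createWriteStream('/mnt/user/backups/flash-backup.tar.gz'));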

View File

@@ -0,0 +1,43 @@
import { Field, InputType, ObjectType } from '@nestjs/graphql';
import { IsBoolean, IsNotEmpty, IsOptional, IsString } from 'class-validator';
import {
BaseSourceConfig,
BaseSourceConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/source/base-source.types.js';
@InputType()
export class FlashPreprocessConfigInput extends BaseSourceConfigInput {
@Field(() => String, { description: 'Flash drive mount path', defaultValue: '/boot' })
@IsString()
@IsNotEmpty()
flashPath!: string;
@Field(() => Boolean, { description: 'Whether to include git history', defaultValue: true })
@IsBoolean()
includeGitHistory!: boolean;
@Field(() => [String], { description: 'Additional paths to include in backup', nullable: true })
@IsOptional()
additionalPaths?: string[];
}
@ObjectType()
export class FlashPreprocessConfig implements BaseSourceConfig {
@Field(() => String, { nullable: false })
label: string = 'Flash drive backup';
@Field(() => String)
flashPath!: string;
@Field(() => Boolean)
includeGitHistory!: boolean;
@Field(() => [String], { nullable: true })
additionalPaths?: string[];
static isTypeOf(obj: any): obj is FlashPreprocessConfig {
return obj && typeof obj.flashPath === 'string' && typeof obj.includeGitHistory === 'boolean';
}
}

View File

@@ -0,0 +1,260 @@
import { Injectable, Logger } from '@nestjs/common';
import { access, stat } from 'fs/promises';
import { join } from 'path';
import { execa } from 'execa';
import { FlashPreprocessConfigInput } from '@app/unraid-api/graph/resolvers/backup/source/flash/flash-source.types.js';
export interface FlashValidationResult {
isValid: boolean;
errors: string[];
warnings: string[];
metadata: {
flashPathExists?: boolean;
flashPathMounted?: boolean;
gitRepoExists?: boolean;
gitRepoSize?: number | null;
additionalPathsValid?: boolean[];
totalSize?: number | null;
availableSpace?: number | null;
};
}
@Injectable()
export class FlashValidationService {
private readonly logger = new Logger(FlashValidationService.name);
async validateFlashConfig(config: FlashPreprocessConfigInput): Promise<FlashValidationResult> {
const result: FlashValidationResult = {
isValid: true,
errors: [],
warnings: [],
metadata: {},
};
try {
// Validate flash path exists and is accessible
const flashPathValid = await this.validateFlashPath(config.flashPath);
result.metadata.flashPathExists = flashPathValid;
if (!flashPathValid) {
result.errors.push(
`Flash path '${config.flashPath}' does not exist or is not accessible`
);
result.isValid = false;
return result;
}
// Check if flash path is mounted
const isMounted = await this.isFlashMounted(config.flashPath);
result.metadata.flashPathMounted = isMounted;
if (!isMounted) {
result.warnings.push(`Flash path '${config.flashPath}' may not be properly mounted`);
}
// Validate git repository if includeGitHistory is enabled
if (config.includeGitHistory) {
const gitRepoExists = await this.validateGitRepository(config.flashPath);
result.metadata.gitRepoExists = gitRepoExists;
if (!gitRepoExists) {
result.warnings.push(
`Git repository not found in '${config.flashPath}'. Git history will be skipped.`
);
} else {
const gitRepoSize = await this.getGitRepositorySize(config.flashPath);
result.metadata.gitRepoSize = gitRepoSize;
if (gitRepoSize && gitRepoSize > 100 * 1024 * 1024) {
// 100MB
result.warnings.push(
`Git repository is large (${Math.round(gitRepoSize / 1024 / 1024)}MB). Backup may take longer.`
);
}
}
}
// Validate additional paths
if (config.additionalPaths && config.additionalPaths.length > 0) {
const pathValidations = await Promise.all(
config.additionalPaths.map((path) => this.validateAdditionalPath(path))
);
result.metadata.additionalPathsValid = pathValidations;
const invalidPaths = config.additionalPaths.filter(
(_, index) => !pathValidations[index]
);
if (invalidPaths.length > 0) {
result.warnings.push(
`Some additional paths are not accessible: ${invalidPaths.join(', ')}`
);
}
}
// Calculate total backup size
const totalSize = await this.calculateTotalBackupSize(config);
result.metadata.totalSize = totalSize;
// Check available space
const availableSpace = await this.getAvailableSpace(config.flashPath);
result.metadata.availableSpace = availableSpace;
if (totalSize && availableSpace && totalSize > availableSpace * 0.8) {
result.warnings.push(
'Backup size may be close to available space. Monitor disk usage during backup.'
);
}
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
result.errors.push(`Validation failed: ${errorMessage}`);
result.isValid = false;
}
return result;
}
async validateFlashPath(flashPath: string): Promise<boolean> {
try {
await access(flashPath);
const stats = await stat(flashPath);
return stats.isDirectory();
} catch {
return false;
}
}
async isFlashMounted(flashPath: string): Promise<boolean> {
try {
// Check if the path is a mount point by comparing device IDs
const pathStat = await stat(flashPath);
const parentStat = await stat(join(flashPath, '..'));
return pathStat.dev !== parentStat.dev;
} catch {
return false;
}
}
async validateGitRepository(flashPath: string): Promise<boolean> {
const gitPath = join(flashPath, '.git');
try {
await access(gitPath);
const stats = await stat(gitPath);
return stats.isDirectory();
} catch {
return false;
}
}
async getGitRepositorySize(flashPath: string): Promise<number | null> {
const gitPath = join(flashPath, '.git');
try {
const { stdout } = await execa('du', ['-sb', gitPath]);
const size = parseInt(stdout.split('\t')[0], 10);
return isNaN(size) ? null : size;
} catch {
return null;
}
}
async validateAdditionalPath(path: string): Promise<boolean> {
try {
await access(path);
return true;
} catch {
return false;
}
}
async calculateTotalBackupSize(config: FlashPreprocessConfigInput): Promise<number | null> {
try {
let totalSize = 0;
// Get flash directory size
const { stdout: flashSize } = await execa('du', ['-sb', config.flashPath]);
totalSize += parseInt(flashSize.split('\t')[0], 10) || 0;
// Add additional paths if specified
if (config.additionalPaths) {
for (const path of config.additionalPaths) {
try {
const { stdout: pathSize } = await execa('du', ['-sb', path]);
totalSize += parseInt(pathSize.split('\t')[0], 10) || 0;
} catch (error: unknown) {
this.logger.warn(
`Failed to get size for additional path ${path}: ${error instanceof Error ? error.message : String(error)}`
);
}
}
}
return totalSize;
} catch {
return null;
}
}
async getAvailableSpace(path: string): Promise<number | null> {
try {
const { stdout } = await execa('df', ['-B1', path]);
const lines = stdout.split('\n');
if (lines.length > 1) {
const fields = lines[1].split(/\s+/);
if (fields.length >= 4) {
const available = parseInt(fields[3], 10);
return isNaN(available) ? null : available;
}
}
return null;
} catch {
return null;
}
}
async checkGitStatus(flashPath: string): Promise<{
hasUncommittedChanges: boolean;
currentBranch: string | null;
lastCommitHash: string | null;
}> {
const result = {
hasUncommittedChanges: false,
currentBranch: null as string | null,
lastCommitHash: null as string | null,
};
try {
// Check for uncommitted changes
const { stdout: statusOutput } = await execa('git', ['status', '--porcelain'], {
cwd: flashPath,
});
result.hasUncommittedChanges = statusOutput.trim().length > 0;
// Get current branch
try {
const { stdout: branchOutput } = await execa(
'git',
['rev-parse', '--abbrev-ref', 'HEAD'],
{ cwd: flashPath }
);
result.currentBranch = branchOutput.trim();
} catch {
// Ignore branch detection errors
}
// Get last commit hash
try {
const { stdout: commitOutput } = await execa('git', ['rev-parse', 'HEAD'], {
cwd: flashPath,
});
result.lastCommitHash = commitOutput.trim();
} catch {
// Ignore commit hash detection errors
}
} catch {
// Git commands failed, repository might not be initialized
}
return result;
}
}
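
Note: a sketch of a pre-flight check before scheduling a flash backup; `flashValidation` is assumed to come from DI.

const check = await flashValidation.validateFlashConfig({
    flashPath: '/boot',
    includeGitHistory: true,
});
if (!check.isValid) {
    throw new Error(`Flash backup misconfigured: ${check.errors.join(', ')}`);
}
check.warnings.forEach((warning) => console.warn(warning));
console.log(`Estimated backup size: ${check.metadata.totalSize ?? 'unknown'} bytes`);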

View File

@@ -0,0 +1,144 @@
import { Injectable, Logger } from '@nestjs/common';
import { access, constants, stat } from 'fs/promises';
import {
BackupSourceConfig,
BackupSourceProcessor,
BackupSourceProcessorOptions,
BackupSourceResult,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source-processor.interface.js';
import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
import { RawBackupConfigInput } from '@app/unraid-api/graph/resolvers/backup/source/raw/raw-source.types.js';
export interface RawSourceConfig extends BackupSourceConfig {
sourcePath: string;
excludePatterns?: string[];
includePatterns?: string[];
}
@Injectable()
export class RawSourceProcessor extends BackupSourceProcessor<RawSourceConfig> {
readonly sourceType = SourceType.RAW;
private readonly logger = new Logger(RawSourceProcessor.name);
get supportsStreaming(): boolean {
return false;
}
async execute(
config: RawSourceConfig,
options?: BackupSourceProcessorOptions
): Promise<BackupSourceResult> {
const startTime = Date.now();
try {
this.logger.log(`Starting RAW backup validation for path: ${config.sourcePath}`);
const validation = await this.validate(config);
if (!validation.valid) {
return {
success: false,
error: validation.error || 'Validation failed',
metadata: {
validationError: validation.error,
supportsStreaming: this.supportsStreaming,
},
supportsStreaming: this.supportsStreaming,
};
}
if (validation.warnings?.length) {
this.logger.warn(
`RAW backup warnings for ${config.sourcePath}: ${validation.warnings.join(', ')}`
);
}
const sourceStats = await stat(config.sourcePath);
const duration = Date.now() - startTime;
this.logger.log(`RAW backup: Providing direct path for ${config.sourcePath}`);
return {
success: true,
outputPath: config.sourcePath,
supportsStreaming: this.supportsStreaming,
isStreamingMode: false,
metadata: {
sourcePath: config.sourcePath,
isDirectory: sourceStats.isDirectory(),
size: sourceStats.size,
duration,
excludePatterns: config.excludePatterns,
includePatterns: config.includePatterns,
validationWarnings: validation.warnings,
supportsStreaming: this.supportsStreaming,
},
};
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
const errorStack = error instanceof Error ? error.stack : undefined;
this.logger.error(
`RAW backup preparation failed for ${config.sourcePath}: ${errorMessage}`,
errorStack
);
return {
success: false,
error: errorMessage,
supportsStreaming: this.supportsStreaming,
metadata: {
sourcePath: config.sourcePath,
duration: Date.now() - startTime,
supportsStreaming: this.supportsStreaming,
},
};
}
}
async validate(
config: RawSourceConfig
): Promise<{ valid: boolean; error?: string; warnings?: string[] }> {
const warnings: string[] = [];
try {
await access(config.sourcePath, constants.F_OK | constants.R_OK);
} catch {
return {
valid: false,
error: `Source path does not exist or is not readable: ${config.sourcePath}`,
};
}
const restrictedPaths = ['/proc', '/sys', '/dev'];
const isRestricted = restrictedPaths.some((path) => config.sourcePath.startsWith(path));
if (isRestricted) {
return {
valid: false,
error: `Cannot backup restricted system paths: ${config.sourcePath}`,
};
}
if (config.excludePatterns?.length && config.includePatterns?.length) {
warnings.push(
'Both include and exclude patterns specified - exclude patterns take precedence'
);
}
const stats = await stat(config.sourcePath);
if (stats.isDirectory()) {
const largeDirPaths = ['/mnt/user', '/mnt/disk'];
const isLargeDir = largeDirPaths.some((path) => config.sourcePath.startsWith(path));
if (isLargeDir && !config.excludePatterns?.length && !config.includePatterns?.length) {
warnings.push(
'Backing up large directory without filters may take significant time and space'
);
}
}
return { valid: true, warnings };
}
async cleanup(result: BackupSourceResult): Promise<void> {
this.logger.log(`RAW backup cleanup completed for: ${result.metadata?.sourcePath}`);
}
}
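A minimal lifecycle sketch for the processor above, assuming an async context; the config literal and the cast are assumptions, since BackupSourceConfig's base fields are defined elsewhere. Note that execute() copies nothing itself: it validates and hands the source path back for the transport layer (e.g. rclone) to consume.
const rawProcessor = new RawSourceProcessor();
const rawResult = await rawProcessor.execute({
sourcePath: '/mnt/user/appdata', // hypothetical source
excludePatterns: ['**/*.log'],
} as RawSourceConfig);
if (rawResult.success) {
console.log(`Source ready at ${rawResult.outputPath}`); // same path as the input
}
await rawProcessor.cleanup(rawResult); // only logs; nothing to delete for RAW sources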

View File

@@ -0,0 +1,45 @@
import { Field, InputType, ObjectType } from '@nestjs/graphql';
import { IsArray, IsNotEmpty, IsOptional, IsString } from 'class-validator';
import {
BaseSourceConfig,
BaseSourceConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/source/base-source.types.js';
@InputType()
export class RawBackupConfigInput extends BaseSourceConfigInput {
@Field(() => String, { description: 'Source path to backup' })
@IsString()
@IsNotEmpty()
sourcePath!: string;
@Field(() => [String], { description: 'File patterns to exclude from backup', nullable: true })
@IsOptional()
@IsArray()
excludePatterns?: string[];
@Field(() => [String], { description: 'File patterns to include in backup', nullable: true })
@IsOptional()
@IsArray()
includePatterns?: string[];
}
@ObjectType()
export class RawBackupConfig implements BaseSourceConfig {
@Field(() => String, { nullable: false })
label: string = 'Raw file backup';
@Field(() => String)
sourcePath!: string;
@Field(() => [String], { nullable: true })
excludePatterns?: string[];
@Field(() => [String], { nullable: true })
includePatterns?: string[];
static isTypeOf(obj: any): obj is RawBackupConfig {
return obj && typeof obj.sourcePath === 'string';
}
}
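Because the input type relies on class-validator decorators, it can also be validated outside the GraphQL pipeline. A sketch, assuming BaseSourceConfigInput contributes no further required fields:
import { plainToInstance } from 'class-transformer';
import { validate } from 'class-validator';
async function parseRawInput(raw: unknown): Promise<RawBackupConfigInput> {
// Rehydrate the plain object into a decorated class instance, then validate it
const input = plainToInstance(RawBackupConfigInput, raw);
const errors = await validate(input);
if (errors.length > 0) {
throw new Error(`Invalid raw backup input: ${errors.map((e) => e.property).join(', ')}`);
}
return input;
}
// e.g. parseRawInput({ sourcePath: '/mnt/user/appdata', excludePatterns: ['*.tmp'] })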

View File

@@ -0,0 +1,252 @@
import { Injectable, Logger } from '@nestjs/common';
import { promises as fs } from 'fs';
import { dirname } from 'path';
import { execa } from 'execa';
import {
BackupSourceConfig,
BackupSourceProcessor,
BackupSourceProcessorOptions,
BackupSourceResult,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source-processor.interface.js';
import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
export interface ScriptSourceConfig extends BackupSourceConfig {
scriptPath: string;
scriptArgs?: string[];
workingDirectory?: string;
environment?: Record<string, string>;
outputPath: string;
}
@Injectable()
export class ScriptSourceProcessor extends BackupSourceProcessor<ScriptSourceConfig> {
readonly sourceType = SourceType.SCRIPT;
private readonly logger = new Logger(ScriptSourceProcessor.name);
private readonly tempDir = '/tmp/unraid-script-preprocessing';
private readonly maxOutputSize = 100 * 1024 * 1024; // 100MB limit
get supportsStreaming(): boolean {
return false;
}
async execute(
config: ScriptSourceConfig,
options?: BackupSourceProcessorOptions
): Promise<BackupSourceResult> {
const startTime = Date.now();
const validation = await this.validate(config);
if (!validation.valid) {
return {
success: false,
error: `Script configuration validation failed: ${validation.error}`,
metadata: { validationError: validation.error, validationWarnings: validation.warnings },
};
}
if (validation.warnings?.length) {
this.logger.warn(`Script backup warnings: ${validation.warnings.join(', ')}`);
}
try {
await this.ensureTempDirectory();
const { command, args } = this.buildCommand(config);
this.logger.log(`Executing script: ${command} ${args.join(' ')}`);
await this.runScriptWithTimeout(command, args, config.timeout / 1000);
const outputSize = await this.getFileSize(config.outputPath);
if (outputSize === 0) {
throw new Error('Script produced no output');
}
if (outputSize > this.maxOutputSize) {
throw new Error(
`Script output too large: ${outputSize} bytes (max: ${this.maxOutputSize})`
);
}
const duration = Date.now() - startTime;
this.logger.log(
`Script completed successfully in ${duration}ms, output size: ${outputSize} bytes`
);
return {
success: true,
outputPath: config.outputPath,
metadata: {
scriptPath: config.scriptPath,
duration,
outputSize,
workingDirectory: config.workingDirectory,
scriptArgs: config.scriptArgs,
validationWarnings: validation.warnings,
},
};
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(`Script backup failed: ${errorMessage}`);
try {
await fs.unlink(config.outputPath);
} catch {
// Ignore cleanup errors
}
return {
success: false,
error: errorMessage,
metadata: {
scriptPath: config.scriptPath,
duration: Date.now() - startTime,
workingDirectory: config.workingDirectory,
scriptArgs: config.scriptArgs,
},
};
}
}
async validate(
config: ScriptSourceConfig
): Promise<{ valid: boolean; error?: string; warnings?: string[] }> {
try {
await fs.access(config.scriptPath, fs.constants.F_OK | fs.constants.X_OK);
const restrictedPaths = ['/boot', '/mnt/user', '/mnt/disk'];
const isRestricted = restrictedPaths.some((path) => config.scriptPath.startsWith(path));
if (isRestricted) {
return {
valid: false,
error: 'Script cannot be located in restricted paths (/boot, /mnt/user, /mnt/disk*)',
};
}
if (config.workingDirectory) {
try {
await fs.access(config.workingDirectory, fs.constants.F_OK);
} catch {
return {
valid: false,
error: `Working directory does not exist: ${config.workingDirectory}`,
};
}
}
const outputDir = dirname(config.outputPath);
try {
await fs.access(outputDir, fs.constants.F_OK | fs.constants.W_OK);
} catch {
return {
valid: false,
error: `Output directory does not exist or is not writable: ${outputDir}`,
};
}
if (config.scriptArgs) {
for (const arg of config.scriptArgs) {
if (arg.length > 1000) {
return {
valid: false,
error: `Script argument too long (max 1000 characters): ${arg.substring(0, 50)}...`,
};
}
}
}
return { valid: true };
} catch {
return {
valid: false,
error: `Script does not exist or is not executable: ${config.scriptPath}`,
};
}
}
async cleanup(result: BackupSourceResult): Promise<void> {
if (result.outputPath) {
await this.cleanupFile(result.outputPath);
}
}
private async ensureTempDirectory(): Promise<void> {
try {
await fs.access(this.tempDir);
} catch {
await fs.mkdir(this.tempDir, { recursive: true, mode: 0o700 });
}
}
private buildCommand(config: ScriptSourceConfig): { command: string; args: string[] } {
const command = 'timeout';
const args = [
`${config.timeout / 1000}s`,
'nice',
'-n',
'10',
'ionice',
'-c',
'3',
'bash',
'-c',
// Single-quote each user-supplied argument so it cannot inject shell syntax
`cd "${config.workingDirectory || '/tmp'}" && exec "${config.scriptPath}" ${(config.scriptArgs || []).map((arg) => `'${arg.replace(/'/g, "'\\''")}'`).join(' ')}`,
];
return { command, args };
}
private async runScriptWithTimeout(
command: string,
args: string[],
timeoutSeconds: number
): Promise<void> {
try {
await execa(command, args, {
timeout: timeoutSeconds * 1000,
stdio: ['ignore', 'pipe', 'pipe'],
env: {
...process.env,
PATH: '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
},
uid: 99, // nobody user
gid: 99, // nobody group
});
} catch (error: any) {
if (error.timedOut) {
throw new Error(`Script timeout after ${timeoutSeconds} seconds`);
}
if (error.signal) {
throw new Error(`Script killed by signal: ${error.signal}`);
}
if (error.exitCode !== undefined && error.exitCode !== 0) {
throw new Error(
`Script exited with code ${error.exitCode}. stderr: ${error.stderr || ''}`
);
}
throw new Error(`Failed to execute script: ${error.message}`);
}
}
private async getFileSize(filePath: string): Promise<number> {
try {
const stats = await fs.stat(filePath);
return stats.size;
} catch {
return 0;
}
}
private async cleanupFile(filePath: string): Promise<void> {
try {
await fs.unlink(filePath);
this.logger.log(`Cleaned up script output file: ${filePath}`);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(`Failed to cleanup script output ${filePath}: ${errorMessage}`);
}
}
}
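A usage sketch under assumed values, in an async context. buildCommand wraps the script in timeout, nice -n 10, and ionice -c 3, so with the config below the child command is roughly timeout 60s nice -n 10 ionice -c 3 bash -c 'cd "/tmp" && exec "/usr/local/bin/dump-db.sh" ...'; the timeout field is taken to be milliseconds given the division by 1000 above, and note that environment is accepted by the config type but not forwarded to the child process in this revision.
const scriptProcessor = new ScriptSourceProcessor();
const scriptResult = await scriptProcessor.execute({
scriptPath: '/usr/local/bin/dump-db.sh', // hypothetical script
scriptArgs: ['--full'],
outputPath: '/tmp/unraid-script-preprocessing/db-dump.tar.gz', // inside the processor's temp dir
timeout: 60_000,
} as ScriptSourceConfig);
if (!scriptResult.success) {
console.error(`Script backup failed: ${scriptResult.error}`);
}
await scriptProcessor.cleanup(scriptResult); // unlinks the output file once the transport has consumed it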

View File

@@ -0,0 +1,63 @@
import { Field, InputType, ObjectType } from '@nestjs/graphql';
import { IsNotEmpty, IsOptional, IsString } from 'class-validator';
import { GraphQLJSON } from 'graphql-scalars';
import {
BaseSourceConfig,
BaseSourceConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/source/base-source.types.js';
@InputType()
export class ScriptPreprocessConfigInput extends BaseSourceConfigInput {
@Field(() => String, { description: 'Path to the script file' })
@IsString()
@IsNotEmpty()
scriptPath!: string;
@Field(() => [String], { description: 'Arguments to pass to the script', nullable: true })
@IsOptional()
scriptArgs?: string[];
@Field(() => String, { description: 'Working directory for script execution', nullable: true })
@IsOptional()
@IsString()
workingDirectory?: string;
@Field(() => GraphQLJSON, {
description: 'Environment variables for script execution',
nullable: true,
})
@IsOptional()
environment?: Record<string, string>;
@Field(() => String, { description: 'Output file path where script should write data' })
@IsString()
@IsNotEmpty()
outputPath!: string;
}
@ObjectType()
export class ScriptPreprocessConfig implements BaseSourceConfig {
@Field(() => String, { nullable: false })
label: string = 'Script backup';
@Field(() => String)
scriptPath!: string;
@Field(() => [String], { nullable: true })
scriptArgs?: string[];
@Field(() => String, { nullable: true })
workingDirectory?: string;
@Field(() => GraphQLJSON, { nullable: true })
environment?: Record<string, string>;
@Field(() => String)
outputPath!: string;
static isTypeOf(obj: any): obj is ScriptPreprocessConfig {
return obj && typeof obj.scriptPath === 'string' && typeof obj.outputPath === 'string';
}
}

View File

@@ -0,0 +1,285 @@
import { Injectable, Logger } from '@nestjs/common';
import { access, constants, stat } from 'fs/promises';
import { dirname, isAbsolute, resolve } from 'path';
import { ScriptPreprocessConfigInput } from '@app/unraid-api/graph/resolvers/backup/source/script/script-source.types.js';
export interface ScriptValidationResult {
isValid: boolean;
errors: string[];
warnings: string[];
metadata: {
scriptExists?: boolean;
scriptExecutable?: boolean;
workingDirectoryExists?: boolean;
outputDirectoryExists?: boolean;
outputDirectoryWritable?: boolean;
environmentVariablesValid?: boolean;
resolvedScriptPath?: string;
resolvedWorkingDirectory?: string;
resolvedOutputPath?: string;
};
}
@Injectable()
export class ScriptValidationService {
private readonly logger = new Logger(ScriptValidationService.name);
async validateScriptConfig(config: ScriptPreprocessConfigInput): Promise<ScriptValidationResult> {
const result: ScriptValidationResult = {
isValid: true,
errors: [],
warnings: [],
metadata: {},
};
try {
// Resolve and validate script path
const resolvedScriptPath = this.resolveScriptPath(
config.scriptPath,
config.workingDirectory
);
result.metadata.resolvedScriptPath = resolvedScriptPath;
const scriptExists = await this.validateScriptExists(resolvedScriptPath);
result.metadata.scriptExists = scriptExists;
if (!scriptExists) {
result.errors.push(`Script file '${resolvedScriptPath}' does not exist`);
result.isValid = false;
return result;
}
// Check if script is executable
const scriptExecutable = await this.validateScriptExecutable(resolvedScriptPath);
result.metadata.scriptExecutable = scriptExecutable;
if (!scriptExecutable) {
result.warnings.push(`Script file '${resolvedScriptPath}' may not be executable`);
}
// Validate working directory
if (config.workingDirectory) {
const resolvedWorkingDir = resolve(config.workingDirectory);
result.metadata.resolvedWorkingDirectory = resolvedWorkingDir;
const workingDirExists = await this.validateDirectory(resolvedWorkingDir);
result.metadata.workingDirectoryExists = workingDirExists;
if (!workingDirExists) {
result.errors.push(`Working directory '${resolvedWorkingDir}' does not exist`);
result.isValid = false;
}
}
// Validate output path and directory
const resolvedOutputPath = this.resolveOutputPath(
config.outputPath,
config.workingDirectory
);
result.metadata.resolvedOutputPath = resolvedOutputPath;
const outputDirectory = dirname(resolvedOutputPath);
const outputDirExists = await this.validateDirectory(outputDirectory);
result.metadata.outputDirectoryExists = outputDirExists;
if (!outputDirExists) {
result.errors.push(`Output directory '${outputDirectory}' does not exist`);
result.isValid = false;
} else {
// Check if output directory is writable
const outputDirWritable = await this.validateDirectoryWritable(outputDirectory);
result.metadata.outputDirectoryWritable = outputDirWritable;
if (!outputDirWritable) {
result.errors.push(`Output directory '${outputDirectory}' is not writable`);
result.isValid = false;
}
}
// Validate environment variables
if (config.environment) {
const envValid = this.validateEnvironmentVariables(config.environment);
result.metadata.environmentVariablesValid = envValid;
if (!envValid) {
result.warnings.push('Some environment variables may contain invalid values');
}
}
// Security validations
this.performSecurityValidations(config, result);
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
result.errors.push(`Validation failed: ${errorMessage}`);
result.isValid = false;
}
return result;
}
private resolveScriptPath(scriptPath: string, workingDirectory?: string): string {
if (isAbsolute(scriptPath)) {
return scriptPath;
}
const baseDir = workingDirectory || process.cwd();
return resolve(baseDir, scriptPath);
}
private resolveOutputPath(outputPath: string, workingDirectory?: string): string {
if (isAbsolute(outputPath)) {
return outputPath;
}
const baseDir = workingDirectory || process.cwd();
return resolve(baseDir, outputPath);
}
async validateScriptExists(scriptPath: string): Promise<boolean> {
try {
await access(scriptPath);
const stats = await stat(scriptPath);
return stats.isFile();
} catch {
return false;
}
}
async validateScriptExecutable(scriptPath: string): Promise<boolean> {
try {
const stats = await stat(scriptPath);
// Check if file has execute permissions (basic check)
return (stats.mode & parseInt('111', 8)) !== 0;
} catch {
return false;
}
}
async validateDirectory(dirPath: string): Promise<boolean> {
try {
await access(dirPath);
const stats = await stat(dirPath);
return stats.isDirectory();
} catch {
return false;
}
}
async validateDirectoryWritable(dirPath: string): Promise<boolean> {
try {
const stats = await stat(dirPath);
// Check if directory has write permissions (basic check)
return (stats.mode & parseInt('200', 8)) !== 0;
} catch {
return false;
}
}
validateEnvironmentVariables(environment: Record<string, string>): boolean {
try {
// Check for potentially dangerous environment variables
const dangerousVars = ['PATH', 'LD_LIBRARY_PATH', 'HOME', 'USER'];
const hasDangerousVars = Object.keys(environment).some((key) =>
dangerousVars.includes(key.toUpperCase())
);
if (hasDangerousVars) {
this.logger.warn('Script environment contains potentially dangerous variables');
}
// Check for valid variable names (basic validation)
const validVarName = /^[A-Za-z_][A-Za-z0-9_]*$/;
const invalidVars = Object.keys(environment).filter((key) => !validVarName.test(key));
if (invalidVars.length > 0) {
this.logger.warn(`Invalid environment variable names: ${invalidVars.join(', ')}`);
return false;
}
return true;
} catch {
return false;
}
}
private performSecurityValidations(
config: ScriptPreprocessConfigInput,
result: ScriptValidationResult
): void {
// Check for potentially dangerous script paths
const dangerousPaths = ['/bin', '/usr/bin', '/sbin', '/usr/sbin'];
const scriptInDangerousPath = dangerousPaths.some((path) =>
result.metadata.resolvedScriptPath?.startsWith(path)
);
if (scriptInDangerousPath) {
result.warnings.push(
'Script is located in a system directory. Ensure it is safe to execute.'
);
}
// Check for dangerous script arguments
if (config.scriptArgs) {
const dangerousArgs = config.scriptArgs.filter(
(arg) =>
arg.includes('..') ||
arg.includes('rm ') ||
arg.includes('sudo ') ||
arg.includes('su ')
);
if (dangerousArgs.length > 0) {
result.warnings.push(
'Script arguments contain potentially dangerous commands or paths.'
);
}
}
// Check if output path is in a safe location
if (result.metadata.resolvedOutputPath) {
const systemPaths = ['/bin', '/usr', '/etc', '/var', '/sys', '/proc'];
const outputInSystemPath = systemPaths.some((path) =>
result.metadata.resolvedOutputPath?.startsWith(path)
);
if (outputInSystemPath) {
result.errors.push('Output path cannot be in system directories for security reasons.');
result.isValid = false;
}
}
// Validate script file extension for common script types
if (result.metadata.resolvedScriptPath) {
const scriptExt = result.metadata.resolvedScriptPath.split('.').pop()?.toLowerCase();
const allowedExtensions = ['sh', 'bash', 'py', 'pl', 'rb', 'js', 'php'];
if (scriptExt && !allowedExtensions.includes(scriptExt)) {
result.warnings.push(
`Script extension '.${scriptExt}' is not commonly recognized. Ensure it is executable.`
);
}
}
}
async getScriptInfo(scriptPath: string): Promise<{
size: number | null;
lastModified: Date | null;
permissions: string | null;
}> {
try {
const stats = await stat(scriptPath);
return {
size: stats.size,
lastModified: stats.mtime,
permissions: '0' + (stats.mode & parseInt('777', 8)).toString(8),
};
} catch {
return {
size: null,
lastModified: null,
permissions: null,
};
}
}
}
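A minimal sketch of running the validation service ahead of scheduling, assuming an async context; the paths are hypothetical, and a relative scriptPath resolves against the working directory exactly as implemented above:
const scriptValidation = new ScriptValidationService();
const report = await scriptValidation.validateScriptConfig({
scriptPath: 'backup-plugin.sh', // resolved against workingDirectory
workingDirectory: '/usr/local/emhttp/plugins/my-plugin',
outputPath: '/mnt/cache/backups/plugin-backup.tar',
} as ScriptPreprocessConfigInput);
if (!report.isValid) {
throw new Error(report.errors.join('; '));
}
report.warnings.forEach((w) => console.warn(w));
// Inspect how relative paths were resolved
console.log(report.metadata.resolvedScriptPath, report.metadata.resolvedOutputPath);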

View File

@@ -0,0 +1,139 @@
import { Injectable, Logger } from '@nestjs/common';
import { execa } from 'execa';
import {
BackupSourceConfig,
BackupSourceProcessor,
BackupSourceProcessorOptions,
BackupSourceResult,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source-processor.interface.js';
import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
import { ZfsPreprocessConfig } from '@app/unraid-api/graph/resolvers/backup/source/zfs/zfs-source.types.js';
import { ZfsValidationService } from '@app/unraid-api/graph/resolvers/backup/source/zfs/zfs-validation.service.js';
export interface ZfsSourceConfig extends BackupSourceConfig {
poolName: string;
datasetName: string;
snapshotPrefix?: string;
cleanupSnapshots: boolean;
retainSnapshots?: number;
}
@Injectable()
export class ZfsSourceProcessor extends BackupSourceProcessor<ZfsSourceConfig> {
readonly sourceType = SourceType.ZFS;
private readonly logger = new Logger(ZfsSourceProcessor.name);
constructor(private readonly zfsValidationService: ZfsValidationService) {
super();
}
get supportsStreaming(): boolean {
return true;
}
async validate(
config: ZfsSourceConfig
): Promise<{ valid: boolean; error?: string; warnings?: string[] }> {
try {
const result = await this.zfsValidationService.validateZfsConfig(config as any);
return {
valid: result.isValid,
error: result.errors.length > 0 ? result.errors.join(', ') : undefined,
warnings: result.warnings,
};
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
return { valid: false, error: errorMessage };
}
}
async execute(
config: ZfsSourceConfig,
options?: BackupSourceProcessorOptions
): Promise<BackupSourceResult> {
try {
this.logger.log(`Starting ZFS backup for dataset: ${config.poolName}/${config.datasetName}`);
const validation = await this.validate(config);
if (!validation.valid) {
return {
success: false,
error: validation.error || 'ZFS validation failed',
cleanupRequired: false,
};
}
const snapshotName = await this.createSnapshot(config);
const snapshotPath = `${config.poolName}/${config.datasetName}@${snapshotName}`;
this.logger.log(`Created ZFS snapshot: ${snapshotPath}`);
const result: BackupSourceResult = {
success: true,
outputPath: snapshotPath,
snapshotName,
cleanupRequired: config.cleanupSnapshots,
metadata: {
poolName: config.poolName,
datasetName: config.datasetName,
snapshotPath,
},
};
return result;
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(`ZFS backup failed: ${errorMessage}`, error);
return {
success: false,
error: errorMessage,
cleanupRequired: false,
};
}
}
async cleanup(result: BackupSourceResult): Promise<void> {
if (!result.cleanupRequired || !result.snapshotName) {
return;
}
try {
const snapshotPath = (result.metadata?.snapshotPath as string) || result.outputPath;
if (snapshotPath && typeof snapshotPath === 'string') {
await this.destroySnapshot(snapshotPath);
this.logger.log(`Cleaned up ZFS snapshot: ${snapshotPath}`);
}
} catch (error) {
this.logger.error(`Failed to cleanup ZFS snapshot: ${error}`);
}
}
private async createSnapshot(config: ZfsSourceConfig): Promise<string> {
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
const prefix = config.snapshotPrefix || 'backup';
const snapshotName = `${prefix}-${timestamp}`;
const snapshotPath = `${config.poolName}/${config.datasetName}@${snapshotName}`;
const { stdout, stderr } = await execa('zfs', ['snapshot', snapshotPath]);
if (stderr) {
this.logger.warn(`ZFS snapshot creation warning: ${stderr}`);
}
this.logger.debug(`ZFS snapshot created: ${stdout}`);
return snapshotName;
}
private async destroySnapshot(snapshotPath: string): Promise<void> {
const { stdout, stderr } = await execa('zfs', ['destroy', snapshotPath]);
if (stderr) {
this.logger.warn(`ZFS snapshot destruction warning: ${stderr}`);
}
this.logger.debug(`ZFS snapshot destroyed: ${stdout}`);
}
}
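An end-to-end sketch of the snapshot lifecycle, assuming an async context and hypothetical pool and dataset names; in the real module ZfsValidationService arrives via Nest DI rather than manual construction:
const zfsProcessor = new ZfsSourceProcessor(new ZfsValidationService());
const zfsResult = await zfsProcessor.execute({
poolName: 'tank',
datasetName: 'appdata',
snapshotPrefix: 'backup',
cleanupSnapshots: true,
} as ZfsSourceConfig);
if (zfsResult.success) {
// outputPath looks like tank/appdata@backup-2025-07-12T00-00-00-000Z,
// which a streaming transport can feed to `zfs send`
console.log(`Snapshot ready: ${zfsResult.outputPath}`);
}
await zfsProcessor.cleanup(zfsResult); // destroys the snapshot since cleanupSnapshots is true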

View File

@@ -0,0 +1,64 @@
import { Field, InputType, ObjectType } from '@nestjs/graphql';
import { IsBoolean, IsNotEmpty, IsNumber, IsOptional, IsString, Min } from 'class-validator';
import {
BaseSourceConfig,
BaseSourceConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/source/base-source.types.js';
@InputType()
export class ZfsPreprocessConfigInput extends BaseSourceConfigInput {
@Field(() => String, { description: 'ZFS pool name' })
@IsString()
@IsNotEmpty()
poolName!: string;
@Field(() => String, { description: 'Dataset name within the pool' })
@IsString()
@IsNotEmpty()
datasetName!: string;
@Field(() => String, { description: 'Snapshot name prefix', nullable: true })
@IsOptional()
@IsString()
snapshotPrefix?: string;
@Field(() => Boolean, {
description: 'Whether to cleanup snapshots after backup',
defaultValue: true,
})
@IsBoolean()
cleanupSnapshots!: boolean;
@Field(() => Number, { description: 'Number of snapshots to retain', nullable: true })
@IsOptional()
@IsNumber()
@Min(1)
retainSnapshots?: number;
}
@ObjectType()
export class ZfsPreprocessConfig implements BaseSourceConfig {
@Field(() => String, { nullable: false })
label: string = 'ZFS backup';
@Field(() => String)
poolName!: string;
@Field(() => String)
datasetName!: string;
@Field(() => String, { nullable: true })
snapshotPrefix?: string;
@Field(() => Boolean)
cleanupSnapshots!: boolean;
@Field(() => Number, { nullable: true })
retainSnapshots?: number;
static isTypeOf(obj: any): obj is ZfsPreprocessConfig {
return obj && typeof obj.poolName === 'string' && typeof obj.datasetName === 'string';
}
}

View File

@@ -0,0 +1,245 @@
import { Injectable, Logger } from '@nestjs/common';
import { access, constants } from 'fs/promises';
import { execa } from 'execa';
import { ZfsPreprocessConfigInput } from '@app/unraid-api/graph/resolvers/backup/source/zfs/zfs-source.types.js';
export interface ZfsValidationResult {
isValid: boolean;
errors: string[];
warnings: string[];
metadata: {
poolExists?: boolean;
datasetExists?: boolean;
datasetSize?: number;
availableSpace?: number;
mountpoint?: string;
};
}
@Injectable()
export class ZfsValidationService {
private readonly logger = new Logger(ZfsValidationService.name);
async validateZfsConfig(config: ZfsPreprocessConfigInput): Promise<ZfsValidationResult> {
const result: ZfsValidationResult = {
isValid: true,
errors: [],
warnings: [],
metadata: {},
};
try {
// Validate pool exists
const poolExists = await this.validatePool(config.poolName);
result.metadata.poolExists = poolExists;
if (!poolExists) {
result.errors.push(`ZFS pool '${config.poolName}' does not exist`);
result.isValid = false;
return result;
}
// Validate dataset exists
const datasetExists = await this.validateDataset(config.poolName, config.datasetName);
result.metadata.datasetExists = datasetExists;
if (!datasetExists) {
result.errors.push(
`ZFS dataset '${config.poolName}/${config.datasetName}' does not exist`
);
result.isValid = false;
return result;
}
// Get dataset information
const datasetInfo = await this.getDatasetInfo(config.poolName, config.datasetName);
result.metadata = { ...result.metadata, ...datasetInfo };
// Validate dataset is mounted
if (!datasetInfo.mountpoint || datasetInfo.mountpoint === 'none') {
result.warnings.push(
`Dataset '${config.poolName}/${config.datasetName}' is not mounted`
);
}
// Check available space for snapshots
if (datasetInfo.availableSpace && datasetInfo.datasetSize) {
const spaceRatio = datasetInfo.availableSpace / datasetInfo.datasetSize;
if (spaceRatio < 0.1) {
result.warnings.push(
'Low available space for snapshot creation (less than 10% of dataset size)'
);
}
}
// Validate snapshot retention settings
if (config.retainSnapshots && config.retainSnapshots < 1) {
result.errors.push('Retain snapshots must be at least 1');
result.isValid = false;
}
// Check for existing snapshots if cleanup is disabled
if (!config.cleanupSnapshots) {
const existingSnapshots = await this.getExistingSnapshots(
config.poolName,
config.datasetName,
config.snapshotPrefix
);
if (existingSnapshots.length > 10) {
result.warnings.push(
`Found ${existingSnapshots.length} existing snapshots. Consider enabling cleanup.`
);
}
}
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
result.errors.push(`Validation failed: ${errorMessage}`);
result.isValid = false;
}
return result;
}
async validatePool(poolName: string): Promise<boolean> {
try {
await execa('zpool', ['list', '-H', '-o', 'name', poolName]);
return true;
} catch {
return false;
}
}
async validateDataset(poolName: string, datasetName: string): Promise<boolean> {
const fullPath = `${poolName}/${datasetName}`;
try {
await execa('zfs', ['list', '-H', '-o', 'name', fullPath]);
return true;
} catch {
return false;
}
}
async getDatasetInfo(
poolName: string,
datasetName: string
): Promise<{
datasetSize?: number;
availableSpace?: number;
mountpoint?: string;
}> {
const fullPath = `${poolName}/${datasetName}`;
const result: { datasetSize?: number; availableSpace?: number; mountpoint?: string } = {};
try {
// Get dataset size
const { stdout: sizeOutput } = await execa('zfs', [
'list',
'-H',
'-p',
'-o',
'used',
fullPath,
]);
const size = parseInt(sizeOutput.trim(), 10);
if (!isNaN(size)) {
result.datasetSize = size;
}
} catch (error: unknown) {
this.logger.warn(
`Failed to get dataset size: ${error instanceof Error ? error.message : String(error)}`
);
}
try {
// Get available space
const { stdout: availOutput } = await execa('zfs', [
'list',
'-H',
'-p',
'-o',
'avail',
fullPath,
]);
const avail = parseInt(availOutput.trim(), 10);
if (!isNaN(avail)) {
result.availableSpace = avail;
}
} catch (error: unknown) {
this.logger.warn(
`Failed to get available space: ${error instanceof Error ? error.message : String(error)}`
);
}
try {
// Get mountpoint
const { stdout: mountOutput } = await execa('zfs', [
'list',
'-H',
'-o',
'mountpoint',
fullPath,
]);
result.mountpoint = mountOutput.trim();
} catch (error: unknown) {
this.logger.warn(
`Failed to get mountpoint: ${error instanceof Error ? error.message : String(error)}`
);
}
return result;
}
async getExistingSnapshots(
poolName: string,
datasetName: string,
prefix?: string
): Promise<string[]> {
const fullPath = `${poolName}/${datasetName}`;
try {
const { stdout } = await execa('zfs', [
'list',
'-H',
'-t',
'snapshot',
'-o',
'name',
'-r',
fullPath,
]);
const snapshots = stdout.split('\n').filter((line) => line.trim());
if (prefix) {
const prefixPattern = `${fullPath}@${prefix}`;
return snapshots.filter((snapshot) => snapshot.startsWith(prefixPattern));
}
return snapshots.filter((snapshot) => snapshot.startsWith(`${fullPath}@`));
} catch {
return [];
}
}
async getPoolHealth(poolName: string): Promise<string | null> {
try {
const { stdout } = await execa('zpool', ['list', '-H', '-o', 'health', poolName]);
return stdout.trim();
} catch {
return null;
}
}
async canCreateSnapshot(poolName: string, datasetName: string): Promise<boolean> {
// Check if we have write permissions and the dataset is not readonly
const fullPath = `${poolName}/${datasetName}`;
try {
const { stdout } = await execa('zfs', ['get', '-H', '-o', 'value', 'readonly', fullPath]);
return stdout.trim() === 'off';
} catch {
return false;
}
}
}
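A sketch of a combined health-and-config check built from these helpers, assuming an async context; the pool and dataset names are hypothetical:
const zfsValidation = new ZfsValidationService();
const health = await zfsValidation.getPoolHealth('tank');
if (health !== 'ONLINE') {
console.warn(`Pool health is ${health ?? 'unknown'}; snapshot backup may be unsafe`);
}
const zfsReport = await zfsValidation.validateZfsConfig({
poolName: 'tank',
datasetName: 'appdata',
cleanupSnapshots: false,
} as ZfsPreprocessConfigInput);
if (zfsReport.isValid && (await zfsValidation.canCreateSnapshot('tank', 'appdata'))) {
console.log(
`Dataset uses ${zfsReport.metadata.datasetSize} bytes, mounted at ${zfsReport.metadata.mountpoint}`
);
}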

View File

@@ -19,6 +19,11 @@ export class DockerMutations {}
@ObjectType()
export class VmMutations {}
@ObjectType({
description: 'Backup related mutations',
})
export class BackupMutations {}
@ObjectType({
description: 'API Key related mutations',
})
@@ -51,6 +56,9 @@ export class RootMutations {
@Field(() => VmMutations, { description: 'VM related mutations' })
vm: VmMutations = new VmMutations();
@Field(() => BackupMutations, { description: 'Backup related mutations' })
backup: BackupMutations = new BackupMutations();
@Field(() => ApiKeyMutations, { description: 'API Key related mutations' })
apiKey: ApiKeyMutations = new ApiKeyMutations();

View File

@@ -3,6 +3,7 @@ import { Mutation, Resolver } from '@nestjs/graphql';
import {
ApiKeyMutations,
ArrayMutations,
BackupMutations,
DockerMutations,
ParityCheckMutations,
RCloneMutations,
@@ -27,6 +28,11 @@ export class RootMutationsResolver {
return new VmMutations();
}
@Mutation(() => BackupMutations, { name: 'backup' })
backup(): BackupMutations {
return new BackupMutations();
}
@Mutation(() => ParityCheckMutations, { name: 'parityCheck' })
parityCheck(): ParityCheckMutations {
return new ParityCheckMutations();

File diff suppressed because it is too large

View File

@@ -5,18 +5,27 @@ import { existsSync } from 'node:fs';
import { mkdir, rm, writeFile } from 'node:fs/promises';
import { dirname, join } from 'node:path';
import { convert } from 'convert';
import { execa } from 'execa';
import got, { HTTPError } from 'got';
import pRetry from 'p-retry';
import { sanitizeParams } from '@app/core/log.js';
import {
getConfigIdFromGroupId,
isBackupJobGroup,
} from '@app/unraid-api/graph/resolvers/backup/backup.utils.js';
import { BackupJobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import { RCloneStatusService } from '@app/unraid-api/graph/resolvers/rclone/rclone-status.service.js';
import {
CreateRCloneRemoteDto,
DeleteRCloneRemoteDto,
GetRCloneJobStatusDto,
GetRCloneRemoteConfigDto,
GetRCloneRemoteDetailsDto,
RCloneProviderOptionResponse,
RCloneJob,
RCloneJobListResponse,
RCloneJobStats,
RCloneProviderResponse,
RCloneRemoteConfig,
RCloneStartBackupInput,
@@ -24,72 +33,109 @@ import {
} from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';
// Constants for the service
const CONSTANTS = {
LOG_LEVEL: {
DEBUG: 'DEBUG',
INFO: 'INFO',
},
RETRY_CONFIG: {
retries: 6,
minTimeout: 100,
maxTimeout: 5000,
factor: 2,
maxRetryTime: 30000,
},
TIMEOUTS: {
GRACEFUL_SHUTDOWN: 2000,
PROCESS_CLEANUP: 1000,
},
};
// Internal interface for job status response from RClone API
interface RCloneJobStatusResponse {
id?: string | number;
group?: string;
stats?: RCloneJobStats;
finished?: boolean;
error?: string;
[key: string]: any;
}
interface BackupStatusResult {
isRunning: boolean;
stats: RCloneJobStats | null;
jobCount: number;
activeJobs: RCloneJobStatusResponse[];
}
interface JobOperationResult {
stopped: string[];
forgotten?: string[];
errors: string[];
}
@Injectable()
export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
private initialized: boolean = false;
private readonly logger = new Logger(RCloneApiService.name);
private rcloneSocketPath: string = '';
private rcloneBaseUrl: string = '';
private rcloneProcess: ChildProcess | null = null;
private readonly rcloneUsername: string =
process.env.RCLONE_USERNAME ||
(process.env.NODE_ENV === 'test' ? 'test-user' : crypto.randomBytes(12).toString('hex'));
private readonly rclonePassword: string =
process.env.RCLONE_PASSWORD ||
(process.env.NODE_ENV === 'test' ? 'test-pass' : crypto.randomBytes(24).toString('hex'));
constructor(private readonly statusService: RCloneStatusService) {}
/**
 * Returns whether the RClone service is initialized and ready to use
 */
get isInitialized(): boolean {
return this.initialized;
}
async onModuleInit(): Promise<void> {
// Check if rclone binary is available first
const isBinaryAvailable = await this.checkRcloneBinaryExists();
if (!isBinaryAvailable) {
this.logger.warn('RClone binary not found on system, skipping initialization');
this.initialized = false;
return;
}
const { getters } = await import('@app/store/index.js');
// Check if Rclone Socket is running, if not, start it.
this.rcloneSocketPath = getters.paths()['rclone-socket'];
const logFilePath = join(getters.paths()['log-base'], 'rclone-unraid-api.log');
this.logger.log(`RClone socket path: ${this.rcloneSocketPath}`);
this.logger.log(`RClone log file path: ${logFilePath}`);
// Format the base URL for Unix socket
this.rcloneBaseUrl = `http://unix:${this.rcloneSocketPath}:`;
// Check if the RClone socket exists, if not, create it.
const socketExists = await this.checkRcloneSocketExists(this.rcloneSocketPath);
if (socketExists) {
const isRunning = await this.checkRcloneSocketRunning();
if (isRunning) {
this.initialized = true;
return;
} else {
this.logger.warn(
'RClone socket is not running but socket exists, removing socket before starting...'
);
await rm(this.rcloneSocketPath, { force: true });
}
this.logger.warn('RClone socket is not running, starting it...');
this.initialized = await this.startRcloneSocket(this.rcloneSocketPath, logFilePath);
return;
} else {
this.logger.warn('RClone socket does not exist, creating it...');
this.initialized = await this.startRcloneSocket(this.rcloneSocketPath, logFilePath);
return;
}
}
@@ -98,95 +144,145 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
this.logger.log('RCloneApiService module destroyed');
}
/**
 * Stops any existing daemon and cleans up the socket, then starts a fresh RClone RC daemon
 */
private async initializeRCloneService(): Promise<void> {
const { getters } = await import('@app/store/index.js');
this.rcloneSocketPath = getters.paths()['rclone-socket'];
const logFilePath = join(getters.paths()['log-base'], 'rclone-unraid-api.log');
this.rcloneBaseUrl = `http://unix:${this.rcloneSocketPath}:`;
this.logger.log(
`Ensuring RClone is stopped and socket is clean before initialization. Socket path: ${this.rcloneSocketPath}`
);
// Stop any existing rclone instances and remove the socket file.
await this.stopRcloneSocket();
this.logger.warn('Proceeding to start new RClone socket...');
this.initialized = await this.startRcloneSocket(this.rcloneSocketPath, logFilePath);
}
private async startRcloneSocket(socketPath: string, logFilePath: string): Promise<boolean> {
try {
await this.ensureLogFileExists(logFilePath);
const rcloneArgs = this.buildRcloneArgs(socketPath, logFilePath);
this.logger.log(`Starting RClone RC daemon on socket: ${socketPath}`);
// Start the process but don't wait for it to finish
const rcloneProcessExecution = execa('rclone', rcloneArgs, { detached: false });
this.rcloneProcess = rcloneProcessExecution;
this.setupProcessListeners();
rcloneProcessExecution.catch((error) => {
this.logger.debug(
`Rclone process execution promise rejected (expected if process failed to start or exited prematurely): ${
error.shortMessage || error.message
}`
);
});
// Wait for socket to be ready using p-retry with exponential backoff
await this.waitForSocketReady();
this.logger.log('RClone RC daemon started and socket is ready.');
return true;
} catch (error: unknown) {
this.logger.error(`Error during RClone RC daemon startup sequence: ${error}`);
this.cleanupFailedProcess();
return false;
}
}
private async ensureLogFileExists(logFilePath: string): Promise<void> {
if (!existsSync(logFilePath)) {
await mkdir(dirname(logFilePath), { recursive: true });
await writeFile(logFilePath, '', 'utf-8');
}
}
private buildRcloneArgs(socketPath: string, logFilePath: string): string[] {
// Unix sockets don't require HTTP authentication - the socket itself provides security
const isUnixSocket = socketPath.startsWith('/');
if (isUnixSocket) {
this.logger.log('Using Unix socket - HTTP authentication not required, using --rc-no-auth');
} else {
this.logger.log(
`Building RClone args with username: ${this.rcloneUsername ? '[SET]' : '[NOT SET]'}, password: ${this.rclonePassword ? '[SET]' : '[NOT SET]'}`
);
}
const args = [
'rcd',
'--rc-addr',
socketPath,
'--log-level',
'INFO',
'--log-file',
logFilePath,
// For Unix sockets, use --rc-no-auth instead of credentials
...(isUnixSocket ? ['--rc-no-auth'] : []),
// Only add authentication for non-Unix socket connections
...(!isUnixSocket && this.rcloneUsername ? ['--rc-user', this.rcloneUsername] : []),
...(!isUnixSocket && this.rclonePassword ? ['--rc-pass', this.rclonePassword] : []),
];
this.logger.log(`RClone command args: ${args.join(' ')}`);
return args;
}
private setupProcessListeners(): void {
if (!this.rcloneProcess) return;
this.rcloneProcess.on('error', (error: Error) => {
this.logger.error(`RClone process failed to start: ${error.message}`);
this.cleanupFailedProcess();
});
this.rcloneProcess.on('exit', (code, signal) => {
this.logger.warn(`RClone process exited unexpectedly with code: ${code}, signal: ${signal}`);
this.cleanupFailedProcess();
});
}
private cleanupFailedProcess(): void {
this.rcloneProcess = null;
this.initialized = false;
}
private async waitForSocketReady(): Promise<void> {
await pRetry(async () => {
const isRunning = await this.checkRcloneSocketRunning();
if (!isRunning) throw new Error('Rclone socket not ready');
}, CONSTANTS.RETRY_CONFIG);
}
private async stopRcloneSocket(): Promise<void> {
if (this.rcloneProcess && !this.rcloneProcess.killed) {
await this.terminateProcess();
}
await this.killExistingRcloneProcesses();
await this.removeSocketFile();
}
private async terminateProcess(): Promise<void> {
if (!this.rcloneProcess) return;
this.logger.log(`Stopping RClone RC daemon process (PID: ${this.rcloneProcess.pid})...`);
try {
const killed = this.rcloneProcess.kill('SIGTERM');
if (!killed) {
this.logger.warn('Failed to kill with SIGTERM, using SIGKILL');
this.rcloneProcess.kill('SIGKILL');
}
this.logger.log('RClone process stopped');
} catch (error: unknown) {
this.logger.error(`Error stopping RClone process: ${error}`);
} finally {
this.rcloneProcess = null;
}
}
private async removeSocketFile(): Promise<void> {
if (this.rcloneSocketPath && existsSync(this.rcloneSocketPath)) {
this.logger.log(`Removing RClone socket file: ${this.rcloneSocketPath}`);
try {
@@ -197,36 +293,19 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
}
}
/**
* Checks if the RClone socket exists
*/
private async checkRcloneSocketExists(socketPath: string): Promise<boolean> {
const socketExists = existsSync(socketPath);
return socketExists;
}
/**
* Checks if the RClone socket is running
*/
private async checkRcloneSocketRunning(): Promise<boolean> {
try {
// A simple API call to check if the daemon is responsive
await this.callRcloneApi('core/pid');
return true;
} catch {
return false;
}
}
@@ -267,18 +346,11 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
return response?.remotes || [];
}
/**
* Get complete remote details
*/
async getRemoteDetails(input: GetRCloneRemoteDetailsDto): Promise<RCloneRemoteConfig> {
await validateObject(GetRCloneRemoteDetailsDto, input);
return this.getRemoteConfig({ name: input.name });
}
/**
* Get configuration of a remote
*/
async getRemoteConfig(input: GetRCloneRemoteConfigDto): Promise<RCloneRemoteConfig> {
await validateObject(GetRCloneRemoteConfigDto, input);
return this.callRcloneApi('config/get', { name: input.name });
@@ -300,77 +372,329 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
return result;
}
/**
* Update an existing remote configuration
*/
async updateRemote(input: UpdateRCloneRemoteDto): Promise<unknown> {
await validateObject(UpdateRCloneRemoteDto, input);
this.logger.log(`Updating remote: ${input.name}`);
return this.callRcloneApi('config/update', {
name: input.name,
...input.parameters,
});
}
/**
* Delete a remote configuration
*/
async deleteRemote(input: DeleteRCloneRemoteDto): Promise<unknown> {
await validateObject(DeleteRCloneRemoteDto, input);
this.logger.log(`Deleting remote: ${input.name}`);
return this.callRcloneApi('config/delete', { name: input.name });
}
/**
* Start a backup operation using sync/copy
* This copies a directory from source to destination
*/
async startBackup(input: RCloneStartBackupInput): Promise<unknown> {
await validateObject(RCloneStartBackupInput, input);
this.logger.log(`Starting backup: ${input.srcPath} → ${input.dstPath}`);
const group = input.configId ? getConfigIdFromGroupId(input.configId) : 'manual';
const params = {
srcFs: input.srcPath,
dstFs: input.dstPath,
...(input.async && { _async: input.async }),
_group: group,
...(input.options || {}),
};
const result = await this.callRcloneApi('sync/copy', params);
const jobId = result.jobid || result.jobId || 'unknown';
this.logger.log(`Backup job created with ID: ${jobId} in group: ${group}`);
return result;
}
/**
 * Gets enhanced job status with computed fields
 */
async getEnhancedJobStatus(jobId: string, configId?: string): Promise<RCloneJob | null> {
try {
await validateObject(GetRCloneJobStatusDto, { jobId });
if (isBackupJobGroup(jobId)) {
try {
const stats = await this.callRcloneApi('core/stats', { group: jobId });
const enhancedStats = this.statusService.enhanceStatsWithFormattedFields({
...stats,
group: jobId,
});
const job = this.statusService.transformStatsToJob(jobId, enhancedStats);
job.configId = configId || getConfigIdFromGroupId(jobId);
// Add computed fields
job.isRunning = job.status === BackupJobStatus.RUNNING;
job.errorMessage = job.error || undefined;
return job;
} catch (error) {
this.logger.warn(`Failed to get group stats for ${jobId}: ${error}`);
}
}
// Fallback to individual job status
const jobStatus = await this.getIndividualJobStatus(jobId);
const enhancedStats = jobStatus.stats
? this.statusService.enhanceStatsWithFormattedFields(jobStatus.stats)
: {};
const job = this.statusService.transformStatsToJob(jobId, enhancedStats);
// Add computed fields
job.isRunning = job.status === BackupJobStatus.RUNNING;
job.errorMessage = job.error || undefined;
// Add configId if provided
if (configId) {
job.configId = configId;
}
return job;
} catch (error) {
this.logger.error(`Failed to fetch enhanced job status for ${jobId}: %o`, error);
return null;
}
}
async getJobStatus(input: GetRCloneJobStatusDto): Promise<RCloneJob> {
const enhancedJob = await this.getEnhancedJobStatus(input.jobId);
if (enhancedJob) {
return enhancedJob;
}
// Final fallback
const jobStatus = await this.getIndividualJobStatus(input.jobId);
return this.statusService.parseJobWithStats(input.jobId, jobStatus);
}
async getIndividualJobStatus(jobId: string): Promise<RCloneJobStatusResponse> {
this.logger.debug(`Fetching status for job ${jobId}`);
const result = await this.callRcloneApi('job/status', { jobid: jobId });
if (result.error) {
this.logger.warn(`Job ${jobId} has error: ${result.error}`);
}
return result;
}
async listRunningJobs(): Promise<RCloneJobListResponse> {
this.logger.debug('Fetching job list from RClone API');
return this.callRcloneApi('job/list');
}
async getAllJobsWithStats(): Promise<RCloneJob[]> {
try {
// Get both the job list and group list
const [runningJobs, groupList] = await Promise.all([
this.listRunningJobs(),
this.callRcloneApi('core/group-list'),
]);
this.logger.debug(`Running jobs: ${JSON.stringify(runningJobs)}`);
this.logger.debug(`Group list: ${JSON.stringify(groupList)}`);
// Safety check: if too many groups, something is wrong
if (groupList.groups && groupList.groups.length > 100) {
this.logger.error(
`DANGER: Found ${groupList.groups.length} groups, aborting to prevent job explosion`
);
return [];
}
// Safety check: if too many individual jobs, something is wrong
if (runningJobs.jobids && runningJobs.jobids.length > 1000) {
this.logger.error(
`DANGER: Found ${runningJobs.jobids.length} individual jobs, aborting to prevent performance issues`
);
return [];
}
if (!runningJobs.jobids?.length) {
this.logger.debug('No running jobs found');
return [];
}
const backupGroups = (groupList.groups || []).filter((group: string) =>
isBackupJobGroup(group)
);
if (backupGroups.length === 0) {
this.logger.debug('No backup groups found');
return [];
}
// Get group stats for all backup groups to get proper stats and group info
const groupStatsMap = new Map<string, any>();
await Promise.all(
backupGroups.map(async (group: string) => {
try {
const stats = await this.callRcloneApi('core/stats', { group });
groupStatsMap.set(group, stats);
} catch (error) {
this.logger.warn(`Failed to get stats for group ${group}: ${error}`);
}
})
);
const jobs: RCloneJob[] = [];
// For each backup group, create a job entry with proper stats
backupGroups.forEach((group) => {
const groupStats = groupStatsMap.get(group);
if (!groupStats) return;
this.logger.debug(`Processing group ${group}: stats=${JSON.stringify(groupStats)}`);
const extractedConfigId = getConfigIdFromGroupId(group);
const enhancedStats = this.statusService.enhanceStatsWithFormattedFields({
...groupStats,
group,
});
const job = this.statusService.transformStatsToJob(group, enhancedStats);
job.configId = extractedConfigId;
// Only include jobs that are truly active (not completed)
const isActivelyTransferring = groupStats.transferring?.length > 0;
const isActivelyChecking = groupStats.checking?.length > 0;
const hasActiveSpeed = groupStats.speed > 0;
const isNotFinished = !groupStats.finished && groupStats.fatalError !== true;
if ((isActivelyTransferring || isActivelyChecking || hasActiveSpeed) && isNotFinished) {
jobs.push(job);
}
});
this.logger.debug(
`Found ${jobs.length} active backup jobs from ${backupGroups.length} groups`
);
return jobs;
} catch (error) {
this.logger.error('Failed to get jobs with stats:', error);
return [];
}
}
async stopAllJobs(): Promise<JobOperationResult> {
const runningJobs = await this.listRunningJobs();
if (!runningJobs.jobids?.length) {
this.logger.log('No running jobs to stop');
return { stopped: [], errors: [] };
}
this.logger.log(`Stopping ${runningJobs.jobids.length} running jobs`);
return this.executeJobOperation(runningJobs.jobids, 'stop');
}
async stopJob(jobId: string): Promise<JobOperationResult> {
this.logger.log(`Stopping job: ${jobId}`);
if (isBackupJobGroup(jobId)) {
// This is a group, use the stopgroup endpoint
return this.executeGroupOperation([jobId], 'stopgroup');
} else {
// This is an individual job ID, use the regular stop endpoint
return this.executeJobOperation([jobId], 'stop');
}
}
private async executeGroupOperation(
groupNames: string[],
operation: 'stopgroup'
): Promise<JobOperationResult> {
const stopped: string[] = [];
const errors: string[] = [];
const promises = groupNames.map(async (groupName) => {
try {
await this.callRcloneApi(`job/${operation}`, { group: groupName });
stopped.push(groupName);
this.logger.log(`Stopped group: ${groupName}`);
} catch (error) {
const errorMsg = `Failed to ${operation} group ${groupName}: ${error}`;
errors.push(errorMsg);
this.logger.error(errorMsg);
}
});
await Promise.allSettled(promises);
return { stopped, errors };
}
private async executeJobOperation(
jobIds: (string | number)[],
operation: 'stop'
): Promise<JobOperationResult> {
const stopped: string[] = [];
const errors: string[] = [];
const promises = jobIds.map(async (jobId) => {
try {
await this.callRcloneApi(`job/${operation}`, { jobid: jobId });
stopped.push(String(jobId));
this.logger.log(`${operation}ped job: ${jobId}`);
} catch (error) {
const errorMsg = `Failed to ${operation} job ${jobId}: ${error}`;
errors.push(errorMsg);
this.logger.error(errorMsg);
}
});
await Promise.allSettled(promises);
return { stopped, errors };
}
async getBackupStatus(): Promise<BackupStatusResult> {
const runningJobs = await this.listRunningJobs();
if (!runningJobs.jobids?.length) {
return this.statusService.parseBackupStatus(runningJobs, []);
}
const jobStatuses = await Promise.allSettled(
runningJobs.jobids.map((jobId) => this.getIndividualJobStatus(String(jobId)))
);
return this.statusService.parseBackupStatus(runningJobs, jobStatuses);
}
private async callRcloneApi(endpoint: string, params: Record<string, unknown> = {}): Promise<any> {
const url = `${this.rcloneBaseUrl}/${endpoint}`;
// Unix sockets don't require HTTP authentication - the socket itself provides security
const isUnixSocket = this.rcloneSocketPath && this.rcloneSocketPath.startsWith('/');
const requestOptions: any = {
json: params,
responseType: 'json',
enableUnixSockets: true,
};
// Only add authentication headers for non-Unix socket connections
if (!isUnixSocket && this.rcloneUsername && this.rclonePassword) {
const authString = `${this.rcloneUsername}:${this.rclonePassword}`;
const authHeader = `Basic ${Buffer.from(authString).toString('base64')}`;
requestOptions.headers = {
Authorization: authHeader,
};
this.logger.debug(
`Calling RClone API: ${endpoint} with auth header: ${authHeader.substring(0, 20)}...`
);
} else {
this.logger.debug(`Calling RClone API: ${endpoint} via Unix socket (no auth required)`);
}
try {
const response = await got.post(url, requestOptions);
return response.body;
} catch (error: unknown) {
this.handleApiError(error, endpoint, params);
@@ -378,54 +702,108 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
}
private handleApiError(error: unknown, endpoint: string, params: Record<string, unknown>): never {
const sanitizedParams = sanitizeParams(params);
if (error instanceof HTTPError) {
const statusCode = error.response.statusCode;
const rcloneError = this.extractRcloneError(error.response.body, params);
const message = `Rclone API Error (${endpoint}, HTTP ${statusCode}): ${rcloneError}`;
this.logger.error(`${message} | Params: ${JSON.stringify(sanitizedParams)}`, error.stack);
throw new Error(message);
}
const message =
error instanceof Error
? `Error calling RClone API (${endpoint}): ${error.message}`
: `Unknown error calling RClone API (${endpoint}): ${String(error)}`;
this.logger.error(
`${message} | Params: ${JSON.stringify(sanitizedParams)}`,
error instanceof Error ? error.stack : undefined
);
throw error instanceof Error ? error : new Error(message);
}
private extractRcloneError(responseBody: unknown, fallbackParams: Record<string, unknown>): string {
try {
const errorBody = typeof responseBody === 'string' ? JSON.parse(responseBody) : responseBody;
if (errorBody && typeof errorBody === 'object' && 'error' in errorBody) {
const typedError = errorBody as { error: unknown; input?: unknown };
let message = `Rclone Error: ${String(typedError.error)}`;
if (typedError.input) {
message += ` | Input: ${JSON.stringify(typedError.input)}`;
} else {
message += ` | Params: ${JSON.stringify(fallbackParams)}`;
}
return message;
}
return responseBody
? `Non-standard error response: ${typeof responseBody === 'string' ? responseBody : JSON.stringify(responseBody)}`
: 'Empty error response received';
} catch {
return `Failed to process error response: ${typeof responseBody === 'string' ? responseBody : JSON.stringify(responseBody)}`;
}
}
private async killExistingRcloneProcesses(): Promise<void> {
try {
this.logger.log('Checking for existing rclone processes...');
const { stdout } = await execa('pgrep', ['-f', 'rclone.*rcd'], { reject: false });
if (!stdout.trim()) {
this.logger.log('No existing rclone processes found');
return;
}
const pids = stdout
.trim()
.split('\n')
.filter((pid) => pid.trim());
this.logger.log(`Found ${pids.length} existing rclone process(es): ${pids.join(', ')}`);
await this.terminateProcesses(pids);
await this.cleanupStaleSocket();
} catch (error) {
this.logger.warn(`Error during rclone process cleanup: ${error}`);
}
}
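// Escalating shutdown: SIGTERM first, then SIGKILL for any process still alive after the grace period (kill -0 checks liveness).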
private async terminateProcesses(pids: string[]): Promise<void> {
for (const pid of pids) {
try {
this.logger.log(`Terminating rclone process PID: ${pid}`);
await execa('kill', ['-TERM', pid], { reject: false });
await new Promise((resolve) =>
setTimeout(resolve, CONSTANTS.TIMEOUTS.GRACEFUL_SHUTDOWN)
);
const { exitCode } = await execa('kill', ['-0', pid], { reject: false });
if (exitCode === 0) {
this.logger.warn(`Process ${pid} still running, using SIGKILL`);
await execa('kill', ['-KILL', pid], { reject: false });
await new Promise((resolve) =>
setTimeout(resolve, CONSTANTS.TIMEOUTS.PROCESS_CLEANUP)
);
}
this.logger.log(`Successfully terminated process ${pid}`);
} catch (error) {
this.logger.warn(`Failed to kill process ${pid}: ${error}`);
}
}
}
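// A daemon killed mid-run can leave its Unix socket file behind; remove it so the next startup can bind cleanly.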
private async cleanupStaleSocket(): Promise<void> {
if (this.rcloneSocketPath && existsSync(this.rcloneSocketPath)) {
await rm(this.rcloneSocketPath, { force: true });
this.logger.log('Removed stale socket file');
}
}
}

View File

@@ -0,0 +1,505 @@
import { Test, TestingModule } from '@nestjs/testing';
import { beforeEach, describe, expect, it, vi } from 'vitest';
import { BackupJobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import { RCloneStatusService } from '@app/unraid-api/graph/resolvers/rclone/rclone-status.service.js';
import { RCloneJobStats } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import { FormatService } from '@app/unraid-api/utils/format.service.js';
// Mock NestJS Logger to suppress logs during tests
vi.mock('@nestjs/common', async (importOriginal) => {
const original = await importOriginal<typeof import('@nestjs/common')>();
return {
...original,
Logger: vi.fn(() => ({
log: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
})),
};
});
describe('RCloneStatusService', () => {
let service: RCloneStatusService;
let mockFormatService: FormatService;
beforeEach(() => {
vi.clearAllMocks();
mockFormatService = {
formatBytes: vi.fn().mockImplementation((bytes: number) => `${bytes} B`),
formatSpeed: vi.fn().mockImplementation((bytesPerSecond: number) => `${bytesPerSecond} B/s`),
formatDuration: vi.fn().mockImplementation((seconds: number) => `${seconds}s`),
} as any;
service = new RCloneStatusService(mockFormatService);
});
describe('enhanceStatsWithFormattedFields', () => {
it('should add formatted fields for all numeric stats', () => {
const stats: RCloneJobStats = {
bytes: 1024,
speed: 512,
elapsedTime: 60,
eta: 120,
};
const result = service.enhanceStatsWithFormattedFields(stats);
expect(result).toEqual({
bytes: 1024,
speed: 512,
elapsedTime: 60,
eta: 120,
formattedBytes: '1024 B',
formattedSpeed: '512 B/s',
formattedElapsedTime: '60s',
formattedEta: '120s',
calculatedPercentage: 0,
isActivelyRunning: true,
isCompleted: false,
});
expect(mockFormatService.formatBytes).toHaveBeenCalledWith(1024);
expect(mockFormatService.formatSpeed).toHaveBeenCalledWith(512);
expect(mockFormatService.formatDuration).toHaveBeenCalledWith(60);
expect(mockFormatService.formatDuration).toHaveBeenCalledWith(120);
});
it('should not add formatted fields for undefined values', () => {
const stats: RCloneJobStats = {
bytes: undefined,
speed: undefined,
elapsedTime: undefined,
eta: undefined,
};
const result = service.enhanceStatsWithFormattedFields(stats);
expect(result).toEqual({
bytes: undefined,
speed: undefined,
elapsedTime: undefined,
eta: undefined,
calculatedPercentage: 0,
formattedElapsedTime: '0s',
formattedEta: 'Unknown',
formattedSpeed: '0 B/s',
isActivelyRunning: false,
isCompleted: false,
});
expect(mockFormatService.formatBytes).not.toHaveBeenCalled();
expect(mockFormatService.formatDuration).not.toHaveBeenCalled();
});
it('should not add formatted fields for null values', () => {
const stats: RCloneJobStats = {
bytes: null as any,
speed: null as any,
elapsedTime: null as any,
eta: null as any,
};
const result = service.enhanceStatsWithFormattedFields(stats);
expect(result).toEqual({
bytes: null,
speed: null,
elapsedTime: null,
eta: null,
calculatedPercentage: 0,
formattedElapsedTime: '0s',
formattedEta: 'Unknown',
formattedSpeed: '0 B/s',
isActivelyRunning: false,
isCompleted: false,
});
expect(mockFormatService.formatBytes).not.toHaveBeenCalled();
expect(mockFormatService.formatDuration).not.toHaveBeenCalled();
});
it('should not add formatted speed for zero speed', () => {
const stats: RCloneJobStats = {
speed: 0,
};
const result = service.enhanceStatsWithFormattedFields(stats);
expect(result).toEqual({
speed: 0,
calculatedPercentage: 0,
formattedElapsedTime: '0s',
formattedEta: 'Unknown',
formattedSpeed: '0 B/s',
isActivelyRunning: false,
isCompleted: false,
});
expect(mockFormatService.formatSpeed).not.toHaveBeenCalled();
});
it('should not add formatted eta for zero eta', () => {
const stats: RCloneJobStats = {
eta: 0,
};
const result = service.enhanceStatsWithFormattedFields(stats);
expect(result).toEqual({
eta: 0,
calculatedPercentage: 0,
formattedElapsedTime: '0s',
formattedEta: 'Unknown',
formattedSpeed: '0 B/s',
isActivelyRunning: false,
isCompleted: false,
});
expect(mockFormatService.formatDuration).not.toHaveBeenCalled();
});
});
describe('transformStatsToJob', () => {
it('should create RCloneJob with completed status when transfers match total', () => {
const stats: RCloneJobStats = {
group: 'unraid-backup',
fatalError: false,
transfers: 5,
totalTransfers: 5,
errors: 0,
percentage: 100,
};
const result = service.transformStatsToJob('123', stats);
expect(result).toEqual({
id: '123',
group: 'unraid-backup',
stats,
finished: true,
success: true,
error: undefined,
progressPercentage: 100,
status: BackupJobStatus.COMPLETED,
hasRecentJob: true,
});
});
it('should create RCloneJob with running status when transfers incomplete', () => {
const stats: RCloneJobStats = {
group: 'unraid-backup',
fatalError: false,
transfers: 3,
totalTransfers: 5,
errors: 0,
percentage: 60,
};
const result = service.transformStatsToJob('123', stats);
expect(result).toEqual({
id: '123',
group: 'unraid-backup',
stats,
finished: false,
success: true,
error: undefined,
progressPercentage: 60,
status: BackupJobStatus.RUNNING,
hasRecentJob: true,
});
});
it('should create RCloneJob with error status when lastError exists', () => {
const stats: RCloneJobStats = {
group: 'unraid-backup',
fatalError: false,
transfers: 0,
totalTransfers: 5,
errors: 1,
percentage: 0,
lastError: 'Connection timeout',
};
const result = service.transformStatsToJob('123', stats);
expect(result).toEqual({
id: '123',
group: 'unraid-backup',
stats,
finished: false,
success: false,
error: 'Connection timeout',
progressPercentage: 0,
status: BackupJobStatus.FAILED,
hasRecentJob: true,
});
});
it('should create RCloneJob with cancelled status when lastError is context canceled', () => {
const stats: RCloneJobStats = {
group: 'unraid-backup',
fatalError: false,
transfers: 0,
totalTransfers: 5,
errors: 1,
percentage: 0,
lastError: 'context canceled',
};
const result = service.transformStatsToJob('123', stats);
expect(result).toEqual({
id: '123',
group: 'unraid-backup',
stats,
finished: false,
success: false,
error: 'context canceled',
progressPercentage: 0,
status: BackupJobStatus.CANCELLED,
hasRecentJob: true,
});
});
it('should handle numeric job ID', () => {
const stats: RCloneJobStats = {
fatalError: false,
transfers: 0,
totalTransfers: 0,
};
const result = service.transformStatsToJob(456, stats);
expect(result.id).toBe('456');
});
it('should handle missing group', () => {
const stats: RCloneJobStats = {
fatalError: false,
transfers: 0,
totalTransfers: 0,
};
const result = service.transformStatsToJob('123', stats);
expect(result.group).toBeUndefined();
});
});
describe('calculateCombinedStats', () => {
it('should combine stats from multiple jobs', () => {
const mockActiveJobs = [
{
stats: {
bytes: 1024,
checks: 2,
transfers: 3,
totalBytes: 2048,
totalChecks: 4,
totalTransfers: 6,
speed: 100,
eta: 120,
},
},
{
stats: {
bytes: 512,
checks: 1,
transfers: 2,
totalBytes: 1024,
totalChecks: 2,
totalTransfers: 4,
speed: 200,
eta: 60,
},
},
];
const result = service.calculateCombinedStats(mockActiveJobs);
expect(result).toEqual({
bytes: 1536,
checks: 3,
transfers: 5,
totalBytes: 3072,
totalChecks: 6,
totalTransfers: 10,
speed: 200, // Max speed
eta: 120, // Max eta
});
});
it('should return null for empty jobs array', () => {
const result = service.calculateCombinedStats([]);
expect(result).toBeNull();
});
it('should return null when no valid stats', () => {
const mockActiveJobs = [{ stats: null as any }, { stats: undefined as any }];
const result = service.calculateCombinedStats(mockActiveJobs);
expect(result).toBeNull();
});
});
describe('parseActiveJobs', () => {
it('should return active jobs that are not finished', () => {
const mockJobStatuses = [
{ status: 'fulfilled', value: { id: '1', finished: false } },
{ status: 'fulfilled', value: { id: '2', finished: true } },
{ status: 'rejected', reason: 'Error' },
] as PromiseSettledResult<any>[];
const result = service.parseActiveJobs(mockJobStatuses);
expect(result).toEqual([{ id: '1', finished: false }]);
});
it('should return empty array when all jobs are finished', () => {
const mockJobStatuses = [
{ status: 'fulfilled', value: { id: '1', finished: true } },
] as PromiseSettledResult<any>[];
const result = service.parseActiveJobs(mockJobStatuses);
expect(result).toEqual([]);
});
});
describe('parseBackupStatus', () => {
it('should return running status when active jobs exist', () => {
const mockRunningJobs = { jobids: ['123', '456'] };
const mockJobStatuses = [
{ status: 'fulfilled', value: { id: '123', finished: false, stats: { bytes: 1024 } } },
{ status: 'fulfilled', value: { id: '456', finished: false, stats: { bytes: 512 } } },
] as PromiseSettledResult<any>[];
const result = service.parseBackupStatus(mockRunningJobs, mockJobStatuses);
expect(result).toEqual({
isRunning: true,
stats: expect.objectContaining({ bytes: 1536 }),
jobCount: 2,
activeJobs: expect.arrayContaining([
expect.objectContaining({ id: '123', finished: false }),
expect.objectContaining({ id: '456', finished: false }),
]),
});
});
it('should return not running when no job IDs', () => {
const mockRunningJobs = { jobids: [] };
const mockJobStatuses = [] as PromiseSettledResult<any>[];
const result = service.parseBackupStatus(mockRunningJobs, mockJobStatuses);
expect(result).toEqual({
isRunning: false,
stats: null,
jobCount: 0,
activeJobs: [],
});
});
});
describe('parseJobWithStats', () => {
it('should parse job with enhanced stats', () => {
const mockJobStatus = {
stats: { bytes: 1024, speed: 512 },
};
const result = service.parseJobWithStats('123', mockJobStatus);
expect(result).toEqual(
expect.objectContaining({
id: '123',
stats: expect.objectContaining({
bytes: 1024,
speed: 512,
formattedBytes: '1024 B',
formattedSpeed: '512 B/s',
}),
})
);
});
it('should handle missing stats', () => {
const mockJobStatus = {};
const result = service.parseJobWithStats('123', mockJobStatus);
expect(result.id).toBe('123');
expect(result.stats).toEqual({});
});
});
describe('parseAllJobsWithStats', () => {
it('should return jobs when job IDs exist', () => {
const mockRunningJobs = { jobids: ['123', '456'] };
const mockJobs = [
{ id: '123', group: 'unraid-backup' },
{ id: '456', group: 'unraid-backup' },
] as any[];
const result = service.parseAllJobsWithStats(mockRunningJobs, mockJobs);
expect(result).toEqual(mockJobs);
});
it('should return empty array when no job IDs', () => {
const mockRunningJobs = { jobids: [] };
const mockJobs = [] as any[];
const result = service.parseAllJobsWithStats(mockRunningJobs, mockJobs);
expect(result).toEqual([]);
});
});
describe('parseJobsWithStats', () => {
it('should parse fulfilled job statuses', () => {
const mockJobStatuses = [
{ status: 'fulfilled', value: { id: '123', stats: { bytes: 1024 } } },
{ status: 'fulfilled', value: { id: '456', stats: { bytes: 512 } } },
{ status: 'rejected', reason: 'Error' },
] as PromiseSettledResult<any>[];
const result = service.parseJobsWithStats(mockJobStatuses);
expect(result).toHaveLength(2);
expect(result[0]).toEqual(
expect.objectContaining({
id: '123',
stats: expect.objectContaining({ bytes: 1024, formattedBytes: '1024 B' }),
})
);
expect(result[1]).toEqual(
expect.objectContaining({
id: '456',
stats: expect.objectContaining({ bytes: 512, formattedBytes: '512 B' }),
})
);
});
it('should handle rejected statuses gracefully', () => {
const mockJobStatuses = [
{ status: 'rejected', reason: 'Error' },
] as PromiseSettledResult<any>[];
const result = service.parseJobsWithStats(mockJobStatuses);
expect(result).toEqual([]);
});
});
describe('getBackupStatus', () => {
it('should return default backup status', () => {
const result = service.getBackupStatus();
expect(result).toEqual({
isRunning: false,
stats: null,
jobCount: 0,
});
});
});
});

View File

@@ -0,0 +1,268 @@
import { Injectable, Logger } from '@nestjs/common';
import { BackupJobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import {
RCloneJob,
RCloneJobListResponse,
RCloneJobStats,
RCloneJobWithStats,
} from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import { FormatService } from '@app/unraid-api/utils/format.service.js';
// Internal interface for job status response from RClone API
interface RCloneJobStatusResponse {
id?: string | number;
group?: string;
stats?: RCloneJobStats;
finished?: boolean;
error?: string;
[key: string]: any;
}
interface BackupStatusResult {
isRunning: boolean;
stats: RCloneJobStats | null;
jobCount: number;
activeJobs: RCloneJobStatusResponse[];
}
@Injectable()
export class RCloneStatusService {
private readonly logger = new Logger(RCloneStatusService.name);
constructor(private readonly formatService: FormatService) {}
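// Derives display-ready fields from raw rclone stats: formatted sizes/durations, a percentage fallback, and activity flags.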
enhanceStatsWithFormattedFields(stats: RCloneJobStats): RCloneJobStats {
const enhancedStats = { ...stats };
const isFinished =
stats.fatalError === false &&
stats.transfers === (stats.totalTransfers || 0) &&
(stats.totalTransfers || 0) > 0;
// Format bytes
if (stats.bytes !== undefined && stats.bytes !== null) {
enhancedStats.formattedBytes = this.formatService.formatBytes(stats.bytes);
}
// Handle speed formatting and reset for finished jobs
if (isFinished && stats.speed !== undefined && stats.speed !== null) {
enhancedStats.speed = 0;
}
if (stats.speed !== undefined && stats.speed !== null && stats.speed > 0) {
enhancedStats.formattedSpeed = this.formatService.formatSpeed(stats.speed);
} else {
enhancedStats.formattedSpeed = '0 B/s';
}
// Format elapsed time
if (stats.elapsedTime !== undefined && stats.elapsedTime !== null) {
enhancedStats.formattedElapsedTime = this.formatService.formatDuration(stats.elapsedTime);
} else {
enhancedStats.formattedElapsedTime = '0s';
}
// Format ETA
if (stats.eta !== undefined && stats.eta !== null && stats.eta > 0) {
enhancedStats.formattedEta = this.formatService.formatDuration(stats.eta);
} else {
enhancedStats.formattedEta = 'Unknown';
}
// Calculate percentage fallback (what frontend currently does)
let calculatedPercentage = stats.percentage;
if (calculatedPercentage === null || calculatedPercentage === undefined) {
if (stats.bytes && stats.totalBytes && stats.totalBytes > 0) {
calculatedPercentage = Math.round((stats.bytes / stats.totalBytes) * 100);
}
}
// For completed jobs, ensure percentage is 100
if (isFinished && calculatedPercentage !== null && calculatedPercentage !== undefined) {
calculatedPercentage = 100;
}
enhancedStats.calculatedPercentage = Math.round(calculatedPercentage || 0);
// Determine if actively running (what frontend currently calculates)
const isActivelyTransferring =
stats.transferring && Array.isArray(stats.transferring) && stats.transferring.length > 0;
const isActivelyChecking =
stats.checking && Array.isArray(stats.checking) && stats.checking.length > 0;
const hasActiveSpeed = (stats.speed || 0) > 0;
const isNotFinished = !isFinished && stats.fatalError !== true;
enhancedStats.isActivelyRunning =
(isActivelyTransferring || isActivelyChecking || hasActiveSpeed) && isNotFinished;
enhancedStats.isCompleted = isFinished;
return enhancedStats;
}
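// Maps raw rclone stats onto the RCloneJob GraphQL shape, deriving a BackupJobStatus from error, cancellation, and transfer state.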
transformStatsToJob(jobId: string | number, stats: RCloneJobStats): RCloneJob {
this.logger.debug(`Stats for job ${jobId}: %o`, stats);
const group = stats.group || undefined;
this.logger.debug(`Processing job ${jobId}: group="${group}"`);
const isFinished =
stats.fatalError === false &&
stats.transfers === (stats.totalTransfers || 0) &&
(stats.totalTransfers || 0) > 0;
const hasError = Boolean(stats.lastError);
const isCancelled = stats.lastError === 'context canceled';
// Determine status
let status: BackupJobStatus;
if (hasError) {
if (isCancelled) {
status = BackupJobStatus.CANCELLED;
} else {
status = BackupJobStatus.FAILED;
}
} else if (isFinished || stats.calculatedPercentage === 100) {
status = BackupJobStatus.COMPLETED;
} else {
status = BackupJobStatus.RUNNING;
}
return {
id: String(jobId),
group: group,
stats,
finished: isFinished,
success: stats.fatalError === false && (stats.errors || 0) === 0,
error: stats.lastError || undefined,
progressPercentage: stats.calculatedPercentage || stats.percentage,
status,
hasRecentJob: true, // If we have a job object, there's a recent job
};
}
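// Additive counters (bytes, checks, transfers) are summed across jobs; speed and eta take the maximum seen.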
calculateCombinedStats(activeJobs: RCloneJobStatusResponse[]): RCloneJobStats | null {
if (activeJobs.length === 0) return null;
const validStats = activeJobs
.map((job) => job.stats)
.filter((stats): stats is RCloneJobStats => Boolean(stats));
if (validStats.length === 0) return null;
return validStats.reduce(
(combined, stats) => ({
bytes: (combined.bytes || 0) + (stats.bytes || 0),
checks: (combined.checks || 0) + (stats.checks || 0),
transfers: (combined.transfers || 0) + (stats.transfers || 0),
totalBytes: (combined.totalBytes || 0) + (stats.totalBytes || 0),
totalChecks: (combined.totalChecks || 0) + (stats.totalChecks || 0),
totalTransfers: (combined.totalTransfers || 0) + (stats.totalTransfers || 0),
speed: Math.max(combined.speed || 0, stats.speed || 0),
eta: Math.max(combined.eta || 0, stats.eta || 0),
}),
{} as RCloneJobStats
);
}
parseActiveJobs(
jobStatuses: PromiseSettledResult<RCloneJobStatusResponse>[]
): RCloneJobStatusResponse[] {
const activeJobs: RCloneJobStatusResponse[] = [];
this.logger.debug(`Job statuses: ${JSON.stringify(jobStatuses)}`);
jobStatuses.forEach((result, index) => {
if (result.status === 'fulfilled' && !result.value.finished) {
activeJobs.push(result.value);
} else if (result.status === 'rejected') {
this.logger.warn(`Failed to get status for job ${index}: ${result.reason}`);
}
});
return activeJobs;
}
parseBackupStatus(
runningJobs: RCloneJobListResponse,
jobStatuses: PromiseSettledResult<RCloneJobStatusResponse>[]
): BackupStatusResult {
if (!runningJobs.jobids?.length) {
return {
isRunning: false,
stats: null,
jobCount: 0,
activeJobs: [],
};
}
const activeJobs = this.parseActiveJobs(jobStatuses);
const combinedStats = this.calculateCombinedStats(activeJobs);
return {
isRunning: activeJobs.length > 0,
stats: combinedStats,
jobCount: activeJobs.length,
activeJobs,
};
}
parseJobWithStats(jobId: string, jobStatus: RCloneJobStatusResponse): RCloneJob {
const stats = jobStatus.stats ? this.enhanceStatsWithFormattedFields(jobStatus.stats) : {};
return this.transformStatsToJob(jobId, stats);
}
parseAllJobsWithStats(runningJobs: RCloneJobListResponse, jobs: RCloneJob[]): RCloneJob[] {
if (!runningJobs.jobids?.length) {
this.logger.log('No active jobs found in RClone');
return [];
}
this.logger.log(
`Found ${runningJobs.jobids.length} active jobs in RClone: [${runningJobs.jobids.join(', ')}]`
);
return jobs;
}
parseJobsWithStats(jobStatuses: PromiseSettledResult<RCloneJobStatusResponse>[]): RCloneJob[] {
const allJobs: RCloneJob[] = [];
jobStatuses.forEach((result, index) => {
if (result.status === 'fulfilled') {
const jobStatus = result.value;
const stats = jobStatus.stats
? this.enhanceStatsWithFormattedFields(jobStatus.stats)
: {};
const job = this.transformStatsToJob(jobStatus.id || index, stats);
allJobs.push(job);
} else {
this.logger.error(`Failed to get status for job ${index}: ${result.reason}`);
}
});
return allJobs;
}
getBackupStatus(): {
isRunning: boolean;
stats: RCloneJobStats | null;
jobCount: number;
} {
try {
return {
isRunning: false,
stats: null,
jobCount: 0,
};
} catch (error) {
this.logger.debug(`Error getting backup status: ${error}`);
return {
isRunning: false,
stats: null,
jobCount: 0,
};
}
}
}

View File

@@ -1,9 +1,11 @@
import { Field, ID, InputType, ObjectType } from '@nestjs/graphql';
import { type Layout } from '@jsonforms/core';
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';
import { IsBoolean, IsObject, IsOptional, IsString } from 'class-validator';
import { GraphQLJSON } from 'graphql-scalars';
import { BackupJobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import { DataSlice } from '@app/unraid-api/types/json-forms.js';
@ObjectType()
@@ -147,6 +149,19 @@ export class RCloneStartBackupInput {
@IsString()
dstPath!: string;
@Field(() => Boolean, { nullable: true, defaultValue: false })
@IsOptional()
@IsBoolean()
async?: boolean;
@Field(() => String, {
nullable: true,
description: 'Configuration ID for job grouping and identification',
})
@IsOptional()
@IsString()
configId?: string;
@Field(() => GraphQLJSON, { nullable: true })
@IsOptional()
@IsObject()
@@ -206,3 +221,189 @@ export class GetRCloneJobStatusDto {
@IsString()
jobId!: string;
}
@ObjectType()
export class RCloneJobStats {
@Field(() => Number, { description: 'Bytes transferred', nullable: true })
bytes?: number;
@Field(() => Number, { description: 'Transfer speed in bytes/sec', nullable: true })
speed?: number;
@Field(() => Number, { description: 'Estimated time to completion in seconds', nullable: true })
eta?: number;
@Field(() => Number, { description: 'Elapsed time in seconds', nullable: true })
elapsedTime?: number;
@Field(() => Number, { description: 'Progress percentage (0-100)', nullable: true })
percentage?: number;
@Field(() => Number, { description: 'Number of checks completed', nullable: true })
checks?: number;
@Field(() => Number, { description: 'Number of deletes completed', nullable: true })
deletes?: number;
@Field(() => Number, { description: 'Number of errors encountered', nullable: true })
errors?: number;
@Field(() => Boolean, { description: 'Whether a fatal error occurred', nullable: true })
fatalError?: boolean;
@Field(() => String, { description: 'Last error message', nullable: true })
lastError?: string;
@Field(() => Number, { description: 'Number of renames completed', nullable: true })
renames?: number;
@Field(() => Boolean, { description: 'Whether there is a retry error', nullable: true })
retryError?: boolean;
@Field(() => Number, { description: 'Number of server-side copies', nullable: true })
serverSideCopies?: number;
@Field(() => Number, { description: 'Bytes in server-side copies', nullable: true })
serverSideCopyBytes?: number;
@Field(() => Number, { description: 'Number of server-side moves', nullable: true })
serverSideMoves?: number;
@Field(() => Number, { description: 'Bytes in server-side moves', nullable: true })
serverSideMoveBytes?: number;
@Field(() => Number, { description: 'Total bytes to transfer', nullable: true })
totalBytes?: number;
@Field(() => Number, { description: 'Total checks to perform', nullable: true })
totalChecks?: number;
@Field(() => Number, { description: 'Total transfers to perform', nullable: true })
totalTransfers?: number;
@Field(() => Number, { description: 'Time spent transferring in seconds', nullable: true })
transferTime?: number;
@Field(() => Number, { description: 'Number of transfers completed', nullable: true })
transfers?: number;
@Field(() => GraphQLJSON, { description: 'Currently transferring files', nullable: true })
transferring?: any[];
@Field(() => GraphQLJSON, { description: 'Currently checking files', nullable: true })
checking?: any[];
// Formatted fields
@Field(() => String, { description: 'Human-readable bytes transferred', nullable: true })
formattedBytes?: string;
@Field(() => String, { description: 'Human-readable transfer speed', nullable: true })
formattedSpeed?: string;
@Field(() => String, { description: 'Human-readable elapsed time', nullable: true })
formattedElapsedTime?: string;
@Field(() => String, { description: 'Human-readable ETA', nullable: true })
formattedEta?: string;
// Computed fields that frontend currently calculates
@Field(() => Number, {
description: 'Calculated percentage (fallback when percentage is null)',
nullable: true,
})
calculatedPercentage?: number;
@Field(() => Boolean, { description: 'Whether the job is actively running', nullable: true })
isActivelyRunning?: boolean;
@Field(() => Boolean, { description: 'Whether the job is completed', nullable: true })
isCompleted?: boolean;
// Allow additional fields
[key: string]: any;
}
@ObjectType()
export class RCloneJob {
@Field(() => PrefixedID, { description: 'Job ID' })
id!: string;
@Field(() => String, { description: 'RClone group for the job', nullable: true })
group?: string;
@Field(() => RCloneJobStats, { description: 'Job status and statistics', nullable: true })
stats?: RCloneJobStats;
@Field(() => Number, { description: 'Progress percentage (0-100)', nullable: true })
progressPercentage?: number;
@Field(() => PrefixedID, { description: 'Configuration ID that triggered this job', nullable: true })
configId?: string;
@Field(() => BackupJobStatus, { description: 'Current status of the job', nullable: true })
status?: BackupJobStatus;
@Field(() => Boolean, { description: 'Whether the job is finished', nullable: true })
finished?: boolean;
@Field(() => Boolean, { description: 'Whether the job was successful', nullable: true })
success?: boolean;
@Field(() => String, { description: 'Error message if job failed', nullable: true })
error?: string;
// Computed fields that frontend currently calculates
@Field(() => Boolean, { description: 'Whether the job is actively running', nullable: true })
isRunning?: boolean;
@Field(() => String, { description: 'Error message for display', nullable: true })
errorMessage?: string;
@Field(() => Boolean, { description: 'Whether there is a recent job', nullable: true })
hasRecentJob?: boolean;
}
@ObjectType()
export class RCloneJobStatusDto {
@Field(() => Number, { description: 'Job ID' })
id!: number;
@Field(() => String, { description: 'RClone group for the job' })
group!: string;
@Field(() => Boolean, { description: 'Whether the job is finished' })
finished!: boolean;
@Field(() => Boolean, { description: 'Whether the job was successful' })
success!: boolean;
@Field(() => String, { description: 'Error message if any' })
error!: string;
@Field(() => Number, { description: 'Job duration in seconds' })
duration!: number;
@Field(() => String, { description: 'Job start time in ISO format' })
startTime!: string;
@Field(() => String, { description: 'Job end time in ISO format' })
endTime!: string;
@Field(() => GraphQLJSON, { description: 'Job output data', nullable: true })
output?: Record<string, any>;
}
// API Response Types (for internal use)
export interface RCloneJobListResponse {
jobids: (string | number)[];
}
export interface RCloneJobWithStats {
jobId: string | number;
stats: RCloneJobStats;
}
export interface RCloneJobsWithStatsResponse {
jobids: (string | number)[];
stats: RCloneJobStats[];
}

View File

@@ -1,20 +1,24 @@
import { forwardRef, Module } from '@nestjs/common';
import { BackupSourceModule } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.module.js';
import { RCloneApiService } from '@app/unraid-api/graph/resolvers/rclone/rclone-api.service.js';
import { RCloneFormService } from '@app/unraid-api/graph/resolvers/rclone/rclone-form.service.js';
import { RCloneStatusService } from '@app/unraid-api/graph/resolvers/rclone/rclone-status.service.js';
import { RCloneMutationsResolver } from '@app/unraid-api/graph/resolvers/rclone/rclone.mutation.resolver.js';
import { RCloneBackupSettingsResolver } from '@app/unraid-api/graph/resolvers/rclone/rclone.resolver.js';
import { RCloneService } from '@app/unraid-api/graph/resolvers/rclone/rclone.service.js';
import { UtilsModule } from '@app/unraid-api/utils/utils.module.js';
@Module({
imports: [UtilsModule, forwardRef(() => BackupSourceModule)],
providers: [
RCloneService,
RCloneApiService,
RCloneStatusService,
RCloneFormService,
RCloneBackupSettingsResolver,
RCloneMutationsResolver,
],
exports: [RCloneService, RCloneApiService, RCloneStatusService],
})
export class RCloneModule {}

View File

@@ -1,7 +1,7 @@
import { Logger } from '@nestjs/common';
import { Args, ResolveField, Resolver } from '@nestjs/graphql';
import { Resource } from '@unraid/shared/graphql.model';
import {
AuthActionVerb,
AuthPossession,
@@ -14,6 +14,7 @@ import {
CreateRCloneRemoteInput,
DeleteRCloneRemoteInput,
RCloneRemote,
RCloneRemoteConfig,
} from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
/**
@@ -38,7 +39,7 @@ export class RCloneMutationsResolver {
name: input.name,
type: input.type,
parameters: {},
config: config as RCloneRemoteConfig,
};
} catch (error) {
this.logger.error(`Error creating remote: ${error}`);

View File

@@ -5,13 +5,13 @@ import { type Layout } from '@jsonforms/core';
import type { SettingSlice } from '@app/unraid-api/types/json-forms.js';
import { RCloneApiService } from '@app/unraid-api/graph/resolvers/rclone/rclone-api.service.js';
import { RCloneFormService } from '@app/unraid-api/graph/resolvers/rclone/rclone-form.service.js';
import { RCloneJob, RCloneRemote } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
/**
* Types for rclone backup configuration UI
*/
export interface RcloneBackupConfigValues {
configStep: { current: number; total: number };
showAdvanced: boolean;
name?: string;
type?: string;
@@ -48,7 +48,7 @@ export class RCloneService {
*/
async onModuleInit(): Promise<void> {
try {
if (!this.rcloneApiService.isInitialized) {
this.logger.warn(
'RClone API service is not initialized, skipping provider info loading'
);
@@ -83,7 +83,7 @@ export class RCloneService {
*/
async getCurrentSettings(): Promise<RcloneBackupConfigValues> {
return {
configStep: { current: 0, total: 0 },
showAdvanced: false,
};
}
@@ -125,4 +125,11 @@ export class RCloneService {
return [];
}
}
/**
* Gets enhanced job status with computed fields
*/
async getEnhancedJobStatus(jobId: string, configId?: string): Promise<RCloneJob | null> {
return this.rcloneApiService.getEnhancedJobStatus(jobId, configId);
}
}

View File

@@ -2,15 +2,14 @@ import { Module } from '@nestjs/common';
import { AuthModule } from '@app/unraid-api/auth/auth.module.js';
import { ApiKeyModule } from '@app/unraid-api/graph/resolvers/api-key/api-key.module.js';
import { ApiKeyResolver } from '@app/unraid-api/graph/resolvers/api-key/api-key.resolver.js';
import { ArrayModule } from '@app/unraid-api/graph/resolvers/array/array.module.js';
import { BackupModule } from '@app/unraid-api/graph/resolvers/backup/backup.module.js';
import { ConfigResolver } from '@app/unraid-api/graph/resolvers/config/config.resolver.js';
import { CustomizationModule } from '@app/unraid-api/graph/resolvers/customization/customization.module.js';
import { DisksModule } from '@app/unraid-api/graph/resolvers/disks/disks.module.js';
import { DisplayResolver } from '@app/unraid-api/graph/resolvers/display/display.resolver.js';
import { DisplayService } from '@app/unraid-api/graph/resolvers/display/display.service.js';
import { DockerModule } from '@app/unraid-api/graph/resolvers/docker/docker.module.js';
import { FlashBackupModule } from '@app/unraid-api/graph/resolvers/flash-backup/flash-backup.module.js';
import { FlashResolver } from '@app/unraid-api/graph/resolvers/flash/flash.resolver.js';
import { DevicesResolver } from '@app/unraid-api/graph/resolvers/info/devices.resolver.js';
import { DevicesService } from '@app/unraid-api/graph/resolvers/info/devices.service.js';
@@ -34,16 +33,18 @@ import { VmsService } from '@app/unraid-api/graph/resolvers/vms/vms.service.js';
import { ServicesResolver } from '@app/unraid-api/graph/services/services.resolver.js';
import { SharesResolver } from '@app/unraid-api/graph/shares/shares.resolver.js';
import { MeResolver } from '@app/unraid-api/graph/user/user.resolver.js';
import { UtilsModule } from '@app/unraid-api/utils/utils.module.js';
@Module({
imports: [
UtilsModule,
ArrayModule,
ApiKeyModule,
AuthModule,
BackupModule,
CustomizationModule,
DockerModule,
DisksModule,
FlashBackupModule,
RCloneModule,
SettingsModule,
],

View File

@@ -0,0 +1,10 @@
import { Global, Module } from '@nestjs/common';
import { FormatService } from '@app/unraid-api/utils/format.service.js';
@Global()
@Module({
providers: [FormatService],
exports: [FormatService],
})
export class UtilsModule {}

View File

@@ -18,7 +18,8 @@ export async function bootstrapNestServer(): Promise<NestFastifyApplication> {
const app = await NestFactory.create<NestFastifyApplication>(AppModule, new FastifyAdapter(), {
bufferLogs: false,
...(LOG_LEVEL !== 'DEBUG' ? { logger: false } : {}),
});
// Enable validation globally

View File

@@ -65,7 +65,16 @@ export class PluginService {
* @returns A tuple of the plugin name and version.
*/
static async listPlugins(): Promise<[string, string][]> {
let plugins: string[] = [];
try {
const config = await loadApiConfig();
plugins = config.plugins || [];
} catch (error) {
PluginService.logger.error(
'Failed to load API config for plugin discovery, using empty list:',
error
);
}
const pluginNames = new Set(
plugins.map((plugin) => {
const { name } = parsePackageArg(plugin);

View File

@@ -65,6 +65,13 @@ if (is_localhost() && !is_good_session()) {
return this.prependDoctypeWithPhp(source, newPhpCode);
}
private addModalsWebComponent(source: string): string {
if (source.includes('<unraid-modals>')) {
return source;
}
return source.replace('<body>', '<body>\n<unraid-modals></unraid-modals>');
}
private hideHeaderLogo(source: string): string {
return source.replace(
'<a href="https://unraid.net" target="_blank"><?readfile("$docroot/webGui/images/UN-logotype-gradient.svg")?></a>',
@@ -72,17 +79,14 @@ if (is_localhost() && !is_good_session()) {
);
}
private applyToSource(fileContent: string): string {
const transformers = [
this.removeNotificationBell.bind(this),
this.replaceToasts.bind(this),
this.addToaster.bind(this),
this.patchGuiBootAuth.bind(this),
this.addModalsWebComponent.bind(this),
this.hideHeaderLogo.bind(this),
];
return transformers.reduce((content, transformer) => transformer(content), fileContent);

View File

@@ -0,0 +1,59 @@
import { describe, expect, it } from 'vitest';
import { FormatService } from '@app/unraid-api/utils/format.service.js';
describe('FormatService', () => {
const service = new FormatService();
describe('formatBytes', () => {
it('should format zero bytes', () => {
expect(service.formatBytes(0)).toBe('0 B');
});
it('should format bytes to best unit', () => {
expect(service.formatBytes(1024)).toBe('1.02 KB');
expect(service.formatBytes(1048576)).toBe('1.05 MB');
expect(service.formatBytes(1073741824)).toBe('1.07 GB');
});
it('should format with decimals when needed', () => {
expect(service.formatBytes(1536)).toBe('1.54 KB');
expect(service.formatBytes(9636529)).toBe('9.64 MB');
});
});
describe('formatSpeed', () => {
it('should format zero speed', () => {
expect(service.formatSpeed(0)).toBe('0 B/s');
});
it('should format speed with /s suffix', () => {
expect(service.formatSpeed(1024)).toBe('1.02 KB/s');
expect(service.formatSpeed(1048576)).toBe('1.05 MB/s');
expect(service.formatSpeed(1073741824)).toBe('1.07 GB/s');
});
it('should format with decimals when needed', () => {
expect(service.formatSpeed(1536)).toBe('1.54 KB/s');
expect(service.formatSpeed(9636529.183648435)).toBe('9.64 MB/s');
});
});
describe('formatDuration', () => {
it('should format small durations in seconds', () => {
expect(service.formatDuration(30)).toBe('30s');
expect(service.formatDuration(45.5)).toBe('45.5s');
});
it('should format longer durations to best unit', () => {
expect(service.formatDuration(60)).toBe('60 s');
expect(service.formatDuration(3600)).toBe('60 min');
expect(service.formatDuration(86400)).toBe('24 h');
});
it('should format with decimals when needed', () => {
expect(service.formatDuration(90)).toBe('1.5 min');
expect(service.formatDuration(11.615060290966666 * 60)).toBe('11.62 min');
});
});
});

View File

@@ -0,0 +1,33 @@
import { Injectable } from '@nestjs/common';
import { convert } from 'convert';
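// Illustrative outputs, assuming the `convert` package's "best" unit selection (values taken from the spec above):
//   formatBytes(9636529)  -> '9.64 MB'
//   formatSpeed(1536)     -> '1.54 KB/s'
//   formatDuration(90)    -> '1.5 min'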
@Injectable()
export class FormatService {
formatBytes(bytes: number): string {
if (bytes === 0) return '0 B';
const result = convert(bytes, 'bytes').to('best');
const value =
typeof result.quantity === 'number' ? Number(result.quantity.toFixed(2)) : result.quantity;
return `${value} ${result.unit}`;
}
formatSpeed(bytesPerSecond: number): string {
if (bytesPerSecond === 0) return '0 B/s';
const result = convert(bytesPerSecond, 'bytes').to('best');
const value =
typeof result.quantity === 'number' ? Number(result.quantity.toFixed(2)) : result.quantity;
return `${value} ${result.unit}/s`;
}
formatDuration(seconds: number): string {
if (seconds < 60) return `${Math.round(seconds * 100) / 100}s`;
const result = convert(seconds, 'seconds').to('best');
const value =
typeof result.quantity === 'number' ? Number(result.quantity.toFixed(2)) : result.quantity;
return `${value} ${result.unit}`;
}
}

View File

@@ -1,13 +1,13 @@
{
"name": "unraid-monorepo",
"private": true,
"version": "4.8.0",
"version": "4.9.5",
"scripts": {
"build": "pnpm -r build",
"build:watch": " pnpm -r --parallel build:watch",
"dev": "pnpm -r dev",
"unraid:deploy": "pnpm -r unraid:deploy",
"test": "pnpm -r test",
"test": "vitest",
"lint": "pnpm -r lint",
"lint:fix": "pnpm -r lint:fix",
"type-check": "pnpm -r type-check",
@@ -43,7 +43,8 @@
"@manypkg/cli": "0.24.0",
"chalk": "5.4.1",
"diff": "8.0.2",
"ignore": "7.0.5"
"ignore": "7.0.5",
"vitest": "3.2.4"
},
"devDependencies": {
"lint-staged": "16.1.2",
@@ -54,8 +55,8 @@
},
"lint-staged": {
"*.{js,jsx,ts,tsx,vue}": [
"pnpm lint:fix"
"npx pnpm lint:fix"
]
},
"packageManager": "pnpm@10.12.4"
"packageManager": "pnpm@10.13.1"
}

View File

@@ -25,10 +25,10 @@
"description": "Unraid Connect plugin for Unraid API",
"devDependencies": {
"@apollo/client": "3.13.8",
"@faker-js/faker": "9.8.0",
"@faker-js/faker": "9.9.0",
"@graphql-codegen/cli": "5.0.7",
"@graphql-typed-document-node/core": "3.2.0",
"@ianvs/prettier-plugin-sort-imports": "4.4.2",
"@ianvs/prettier-plugin-sort-imports": "4.5.1",
"@jsonforms/core": "3.6.0",
"@nestjs/apollo": "13.1.0",
"@nestjs/common": "11.1.3",
@@ -41,13 +41,13 @@
"@types/ini": "4.1.1",
"@types/ip": "1.1.3",
"@types/lodash-es": "4.17.12",
"@types/node": "22.15.32",
"@types/node": "22.16.3",
"@types/ws": "8.18.1",
"camelcase-keys": "9.1.3",
"class-transformer": "0.5.1",
"class-validator": "0.14.2",
"execa": "9.6.0",
"fast-check": "4.1.1",
"fast-check": "4.2.0",
"got": "14.4.7",
"graphql": "16.11.0",
"graphql-scalars": "1.24.2",
@@ -57,13 +57,13 @@
"jose": "6.0.11",
"lodash-es": "4.17.21",
"nest-authz": "2.17.0",
"prettier": "3.5.3",
"prettier": "3.6.2",
"rimraf": "6.0.1",
"rxjs": "7.8.2",
"type-fest": "4.41.0",
"typescript": "5.8.3",
"vitest": "3.2.4",
"ws": "8.18.2",
"ws": "8.18.3",
"zen-observable-ts": "1.1.0"
},
"dependencies": {

View File

@@ -0,0 +1,269 @@
import { EventEmitter2 } from '@nestjs/event-emitter';
import { PubSub } from 'graphql-subscriptions';
import { beforeEach, describe, expect, it, vi } from 'vitest';
import { MinigraphStatus } from '../config/connect.config.js';
import { EVENTS, GRAPHQL_PUBSUB_CHANNEL } from '../helper/nest-tokens.js';
import { MothershipConnectionService } from '../mothership-proxy/connection.service.js';
import { MothershipController } from '../mothership-proxy/mothership.controller.js';
import { MothershipHandler } from '../mothership-proxy/mothership.events.js';
describe('MothershipHandler - Behavioral Tests', () => {
let handler: MothershipHandler;
let connectionService: MothershipConnectionService;
let mothershipController: MothershipController;
let pubSub: PubSub;
let eventEmitter: EventEmitter2;
// Track actual state changes and effects
let connectionAttempts: Array<{ timestamp: number; reason: string }> = [];
let publishedMessages: Array<{ channel: string; data: any }> = [];
let controllerStops: Array<{ timestamp: number; reason?: string }> = [];
beforeEach(() => {
// Reset tracking arrays
connectionAttempts = [];
publishedMessages = [];
controllerStops = [];
// Create real event emitter for integration testing
eventEmitter = new EventEmitter2();
// Mock connection service with realistic behavior
connectionService = {
getIdentityState: vi.fn(),
getConnectionState: vi.fn(),
} as any;
// Mock controller that tracks behavior instead of just method calls
mothershipController = {
initOrRestart: vi.fn().mockImplementation(() => {
connectionAttempts.push({
timestamp: Date.now(),
reason: 'initOrRestart called',
});
return Promise.resolve();
}),
stop: vi.fn().mockImplementation(() => {
controllerStops.push({
timestamp: Date.now(),
});
return Promise.resolve();
}),
} as any;
// Mock PubSub that tracks published messages
pubSub = {
publish: vi.fn().mockImplementation((channel: string, data: any) => {
publishedMessages.push({ channel, data });
return Promise.resolve();
}),
} as any;
handler = new MothershipHandler(connectionService, mothershipController, pubSub);
});
describe('Connection Recovery Behavior', () => {
it('should attempt reconnection when ping fails', async () => {
// Given: Connection is in ping failure state
vi.mocked(connectionService.getConnectionState).mockReturnValue({
status: MinigraphStatus.PING_FAILURE,
error: 'Ping timeout after 3 minutes',
});
// When: Connection status change event occurs
await handler.onMothershipConnectionStatusChanged();
// Then: System should attempt to recover the connection
expect(connectionAttempts).toHaveLength(1);
expect(connectionAttempts[0].reason).toBe('initOrRestart called');
});
it('should NOT interfere with exponential backoff during error retry state', async () => {
// Given: Connection is in error retry state (GraphQL client managing backoff)
vi.mocked(connectionService.getConnectionState).mockReturnValue({
status: MinigraphStatus.ERROR_RETRYING,
error: 'Network error',
timeout: 20000,
timeoutStart: Date.now(),
});
// When: Connection status change event occurs
await handler.onMothershipConnectionStatusChanged();
// Then: System should NOT interfere with ongoing retry logic
expect(connectionAttempts).toHaveLength(0);
});
it('should remain stable during normal connection states', async () => {
const stableStates = [MinigraphStatus.CONNECTED, MinigraphStatus.CONNECTING];
for (const status of stableStates) {
// Reset for each test
connectionAttempts.length = 0;
// Given: Connection is in a stable state
vi.mocked(connectionService.getConnectionState).mockReturnValue({
status,
error: null,
});
// When: Connection status change event occurs
await handler.onMothershipConnectionStatusChanged();
// Then: System should not trigger unnecessary reconnection attempts
expect(connectionAttempts).toHaveLength(0);
}
});
});
describe('Identity-Based Connection Behavior', () => {
it('should establish connection when valid API key becomes available', async () => {
// Given: Valid API key is present
vi.mocked(connectionService.getIdentityState).mockReturnValue({
state: {
apiKey: 'valid-unraid-key-12345',
unraidVersion: '6.12.0',
flashGuid: 'test-flash-guid',
apiVersion: '1.0.0',
},
isLoaded: true,
});
// When: Identity changes
await handler.onIdentityChanged();
// Then: System should establish mothership connection
expect(connectionAttempts).toHaveLength(1);
});
it('should not attempt connection without valid credentials', async () => {
const invalidCredentials = [{ apiKey: undefined }, { apiKey: '' }];
for (const credentials of invalidCredentials) {
// Reset for each test
connectionAttempts.length = 0;
// Given: Invalid or missing API key
vi.mocked(connectionService.getIdentityState).mockReturnValue({
state: credentials,
isLoaded: false,
});
// When: Identity changes
await handler.onIdentityChanged();
// Then: System should not attempt connection
expect(connectionAttempts).toHaveLength(0);
}
});
});
describe('Logout Behavior', () => {
it('should properly clean up connections and notify subscribers on logout', async () => {
// When: User logs out
await handler.logout({ reason: 'User initiated logout' });
// Then: System should clean up connections
expect(controllerStops).toHaveLength(1);
// And: Subscribers should be notified of empty state
expect(publishedMessages).toHaveLength(2);
const serversMessage = publishedMessages.find(
(m) => m.channel === GRAPHQL_PUBSUB_CHANNEL.SERVERS
);
const ownerMessage = publishedMessages.find(
(m) => m.channel === GRAPHQL_PUBSUB_CHANNEL.OWNER
);
expect(serversMessage?.data).toEqual({ servers: [] });
expect(ownerMessage?.data).toEqual({
owner: { username: 'root', url: '', avatar: '' },
});
});
it('should handle logout gracefully even without explicit reason', async () => {
// When: System logout occurs without reason
await handler.logout({});
// Then: Cleanup should still occur properly
expect(controllerStops).toHaveLength(1);
expect(publishedMessages).toHaveLength(2);
});
});
describe('DDoS Prevention Behavior', () => {
it('should demonstrate exponential backoff is respected during network errors', async () => {
// Given: Multiple rapid network errors occur
const errorStates = [
{ status: MinigraphStatus.ERROR_RETRYING, error: 'Network error 1' },
{ status: MinigraphStatus.ERROR_RETRYING, error: 'Network error 2' },
{ status: MinigraphStatus.ERROR_RETRYING, error: 'Network error 3' },
];
// When: Rapid error retry states occur
for (const state of errorStates) {
vi.mocked(connectionService.getConnectionState).mockReturnValue(state);
await handler.onMothershipConnectionStatusChanged();
}
// Then: No linear retry attempts should be made (respecting exponential backoff)
expect(connectionAttempts).toHaveLength(0);
});
it('should differentiate between network errors and ping failures', async () => {
// Given: Network error followed by ping failure
vi.mocked(connectionService.getConnectionState).mockReturnValue({
status: MinigraphStatus.ERROR_RETRYING,
error: 'Network error',
});
// When: Network error occurs
await handler.onMothershipConnectionStatusChanged();
// Then: No immediate reconnection attempt
expect(connectionAttempts).toHaveLength(0);
// Given: Ping failure occurs (different issue)
vi.mocked(connectionService.getConnectionState).mockReturnValue({
status: MinigraphStatus.PING_FAILURE,
error: 'Ping timeout',
});
// When: Ping failure occurs
await handler.onMothershipConnectionStatusChanged();
// Then: Immediate reconnection attempt should occur
expect(connectionAttempts).toHaveLength(1);
});
});
describe('Edge Cases and Error Handling', () => {
it('should handle missing connection state gracefully', async () => {
// Given: Connection service returns undefined
vi.mocked(connectionService.getConnectionState).mockReturnValue(undefined);
// When: Connection status change occurs
await handler.onMothershipConnectionStatusChanged();
// Then: No errors should occur, no reconnection attempts
expect(connectionAttempts).toHaveLength(0);
});
it('should handle malformed connection state', async () => {
// Given: Malformed connection state
vi.mocked(connectionService.getConnectionState).mockReturnValue({
status: 'UNKNOWN_STATUS' as any,
error: 'Malformed state',
});
// When: Connection status change occurs
await handler.onMothershipConnectionStatusChanged();
// Then: Should not trigger reconnection for unknown states
expect(connectionAttempts).toHaveLength(0);
});
});
});

View File

@@ -0,0 +1,158 @@
import { ConfigService } from '@nestjs/config';
import { access, constants, mkdir, readFile, rm } from 'fs/promises';
import { join } from 'path';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { ConfigType } from '../config/connect.config.js';
import { ConnectStatusWriterService } from './connect-status-writer.service.js';
describe('ConnectStatusWriterService Config Behavior', () => {
let service: ConnectStatusWriterService;
let configService: ConfigService<ConfigType, true>;
const testDir = '/tmp/connect-status-config-test';
const testFilePath = join(testDir, 'connectStatus.json');
// Simulate config changes
let configStore: any = {};
beforeEach(async () => {
vi.clearAllMocks();
// Reset config store
configStore = {};
// Create test directory
await mkdir(testDir, { recursive: true });
// Create a ConfigService mock that behaves like the real one
configService = {
get: vi.fn().mockImplementation((key: string) => {
console.log(`ConfigService.get('${key}') called, returning:`, configStore[key]);
return configStore[key];
}),
set: vi.fn().mockImplementation((key: string, value: any) => {
console.log(`ConfigService.set('${key}', ${JSON.stringify(value)}) called`);
configStore[key] = value;
}),
} as unknown as ConfigService<ConfigType, true>;
service = new ConnectStatusWriterService(configService);
// Override the status file path to use our test location
Object.defineProperty(service, 'statusFilePath', {
get: () => testFilePath,
});
});
afterEach(async () => {
await service.onModuleDestroy();
await rm(testDir, { recursive: true, force: true });
});
it('should write status when config is updated directly', async () => {
// Initialize service - should write PRE_INIT
await service.onApplicationBootstrap();
await new Promise(resolve => setTimeout(resolve, 50));
let content = await readFile(testFilePath, 'utf-8');
let data = JSON.parse(content);
console.log('Initial status:', data);
expect(data.connectionStatus).toBe('PRE_INIT');
// Update config directly (simulating what ConnectionService does)
console.log('\n=== Updating config to CONNECTED ===');
configService.set('connect.mothership', {
status: 'CONNECTED',
error: null,
lastPing: Date.now(),
});
// Call the writeStatus method directly (since @OnEvent handles the event)
await service['writeStatus']();
content = await readFile(testFilePath, 'utf-8');
data = JSON.parse(content);
console.log('Status after config update:', data);
expect(data.connectionStatus).toBe('CONNECTED');
});
it('should test the actual flow with multiple status updates', async () => {
await service.onApplicationBootstrap();
await new Promise(resolve => setTimeout(resolve, 50));
const statusUpdates = [
{ status: 'CONNECTING', error: null, lastPing: null },
{ status: 'CONNECTED', error: null, lastPing: Date.now() },
{ status: 'DISCONNECTED', error: 'Lost connection', lastPing: Date.now() - 10000 },
{ status: 'RECONNECTING', error: null, lastPing: Date.now() - 10000 },
{ status: 'CONNECTED', error: null, lastPing: Date.now() },
];
for (const update of statusUpdates) {
console.log(`\n=== Updating to ${update.status} ===`);
// Update config
configService.set('connect.mothership', update);
// Call writeStatus directly
await service['writeStatus']();
const content = await readFile(testFilePath, 'utf-8');
const data = JSON.parse(content);
console.log(`Status file shows: ${data.connectionStatus}`);
expect(data.connectionStatus).toBe(update.status);
}
});
it('should handle case where config is not set before event', async () => {
await service.onApplicationBootstrap();
await new Promise(resolve => setTimeout(resolve, 50));
// Delete the config
delete configStore['connect.mothership'];
// Call writeStatus without config
console.log('\n=== Calling writeStatus with no config ===');
await service['writeStatus']();
const content = await readFile(testFilePath, 'utf-8');
const data = JSON.parse(content);
console.log('Status with no config:', data);
expect(data.connectionStatus).toBe('PRE_INIT');
// Now set config and call writeStatus again
console.log('\n=== Setting config and calling writeStatus ===');
configService.set('connect.mothership', {
status: 'CONNECTED',
error: null,
lastPing: Date.now(),
});
await service['writeStatus']();
const content2 = await readFile(testFilePath, 'utf-8');
const data2 = JSON.parse(content2);
console.log('Status after setting config:', data2);
expect(data2.connectionStatus).toBe('CONNECTED');
});
describe('cleanup on shutdown', () => {
it('should delete status file on module destroy', async () => {
await service.onApplicationBootstrap();
await new Promise(resolve => setTimeout(resolve, 50));
// Verify file exists
await expect(access(testFilePath, constants.F_OK)).resolves.not.toThrow();
// Cleanup
await service.onModuleDestroy();
// Verify file is deleted
await expect(access(testFilePath, constants.F_OK)).rejects.toThrow();
});
it('should handle cleanup when file does not exist', async () => {
// Don't bootstrap (so no file is written)
await expect(service.onModuleDestroy()).resolves.not.toThrow();
});
});
});

View File

@@ -0,0 +1,167 @@
import { ConfigService } from '@nestjs/config';
import { access, constants, mkdir, readFile, rm } from 'fs/promises';
import { join } from 'path';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { ConfigType } from '../config/connect.config.js';
import { ConnectStatusWriterService } from './connect-status-writer.service.js';
describe('ConnectStatusWriterService Integration', () => {
let service: ConnectStatusWriterService;
let configService: ConfigService<ConfigType, true>;
const testDir = '/tmp/connect-status-test';
const testFilePath = join(testDir, 'connectStatus.json');
beforeEach(async () => {
vi.clearAllMocks();
// Create test directory
await mkdir(testDir, { recursive: true });
configService = {
get: vi.fn().mockImplementation((key: string) => {
console.log(`ConfigService.get called with key: ${key}`);
return {
status: 'CONNECTED',
error: null,
lastPing: Date.now(),
};
}),
} as unknown as ConfigService<ConfigType, true>;
service = new ConnectStatusWriterService(configService);
// Override the status file path to use our test location
Object.defineProperty(service, 'statusFilePath', {
get: () => testFilePath,
});
});
afterEach(async () => {
await service.onModuleDestroy();
await rm(testDir, { recursive: true, force: true });
});
it('should write initial PRE_INIT status, then update on event', async () => {
// First, mock the config to return undefined (no connection metadata)
vi.mocked(configService.get).mockReturnValue(undefined);
console.log('=== Starting onApplicationBootstrap ===');
await service.onApplicationBootstrap();
// Wait a bit for the initial write to complete
await new Promise(resolve => setTimeout(resolve, 50));
// Read initial status
const initialContent = await readFile(testFilePath, 'utf-8');
const initialData = JSON.parse(initialContent);
console.log('Initial status written:', initialData);
expect(initialData.connectionStatus).toBe('PRE_INIT');
expect(initialData.error).toBeNull();
expect(initialData.lastPing).toBeNull();
// Now update the mock to return CONNECTED status
vi.mocked(configService.get).mockReturnValue({
status: 'CONNECTED',
error: null,
lastPing: 1234567890,
});
console.log('=== Calling writeStatus directly ===');
await service['writeStatus']();
// Read updated status
const updatedContent = await readFile(testFilePath, 'utf-8');
const updatedData = JSON.parse(updatedContent);
console.log('Updated status after writeStatus:', updatedData);
expect(updatedData.connectionStatus).toBe('CONNECTED');
expect(updatedData.lastPing).toBe(1234567890);
});
it('should handle rapid status changes correctly', async () => {
const statusChanges = [
{ status: 'PRE_INIT', error: null, lastPing: null },
{ status: 'CONNECTING', error: null, lastPing: null },
{ status: 'CONNECTED', error: null, lastPing: Date.now() },
{ status: 'DISCONNECTED', error: 'Connection lost', lastPing: Date.now() - 5000 },
{ status: 'CONNECTED', error: null, lastPing: Date.now() },
];
let changeIndex = 0;
vi.mocked(configService.get).mockImplementation(() => {
const change = statusChanges[changeIndex];
console.log(`Returning status ${changeIndex}: ${change.status}`);
return change;
});
await service.onApplicationBootstrap();
await new Promise(resolve => setTimeout(resolve, 50));
// Simulate the final status change
changeIndex = statusChanges.length - 1;
console.log(`=== Calling writeStatus for final status: ${statusChanges[changeIndex].status} ===`);
await service['writeStatus']();
// Read final status
const finalContent = await readFile(testFilePath, 'utf-8');
const finalData = JSON.parse(finalContent);
console.log('Final status after status change:', finalData);
// Should have the last status
expect(finalData.connectionStatus).toBe('CONNECTED');
expect(finalData.error).toBeNull();
});
it('should handle multiple write calls correctly', async () => {
const writes: number[] = [];
const originalWriteStatus = service['writeStatus'].bind(service);
service['writeStatus'] = async function() {
const timestamp = Date.now();
writes.push(timestamp);
console.log(`writeStatus called at ${timestamp}`);
return originalWriteStatus();
};
await service.onApplicationBootstrap();
await new Promise(resolve => setTimeout(resolve, 50));
const initialWrites = writes.length;
console.log(`Initial writes: ${initialWrites}`);
// Make multiple write calls
for (let i = 0; i < 3; i++) {
console.log(`Calling writeStatus ${i}`);
await service['writeStatus']();
}
console.log(`Total writes: ${writes.length}`);
console.log('Write timestamps:', writes);
// Should have initial write + 3 additional writes
expect(writes.length).toBe(initialWrites + 3);
});
describe('cleanup on shutdown', () => {
it('should delete status file on module destroy', async () => {
await service.onApplicationBootstrap();
await new Promise(resolve => setTimeout(resolve, 50));
// Verify file exists
await expect(access(testFilePath, constants.F_OK)).resolves.not.toThrow();
// Cleanup
await service.onModuleDestroy();
// Verify file is deleted
await expect(access(testFilePath, constants.F_OK)).rejects.toThrow();
});
it('should handle cleanup gracefully when file does not exist', async () => {
// Don't bootstrap (so no file is created)
await expect(service.onModuleDestroy()).resolves.not.toThrow();
});
});
});

View File

@@ -0,0 +1,140 @@
import { ConfigService } from '@nestjs/config';
import { unlink, writeFile } from 'fs/promises';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { ConfigType } from '../config/connect.config.js';
import { ConnectStatusWriterService } from './connect-status-writer.service.js';
vi.mock('fs/promises', () => ({
writeFile: vi.fn(),
unlink: vi.fn(),
}));
describe('ConnectStatusWriterService', () => {
let service: ConnectStatusWriterService;
let configService: ConfigService<ConfigType, true>;
let writeFileMock: ReturnType<typeof vi.fn>;
let unlinkMock: ReturnType<typeof vi.fn>;
beforeEach(async () => {
vi.clearAllMocks();
vi.useFakeTimers();
writeFileMock = vi.mocked(writeFile);
unlinkMock = vi.mocked(unlink);
configService = {
get: vi.fn().mockReturnValue({
status: 'CONNECTED',
error: null,
lastPing: Date.now(),
}),
} as unknown as ConfigService<ConfigType, true>;
service = new ConnectStatusWriterService(configService);
});
afterEach(async () => {
vi.useRealTimers();
});
describe('onApplicationBootstrap', () => {
it('should write initial status on bootstrap', async () => {
await service.onApplicationBootstrap();
expect(writeFileMock).toHaveBeenCalledTimes(1);
expect(writeFileMock).toHaveBeenCalledWith(
'/var/local/emhttp/connectStatus.json',
expect.stringContaining('CONNECTED')
);
});
it('should handle event-driven status changes', async () => {
await service.onApplicationBootstrap();
writeFileMock.mockClear();
// The service uses the @OnEvent decorator, so we call the method directly
await service['writeStatus']();
expect(writeFileMock).toHaveBeenCalledTimes(1);
});
});
describe('write content', () => {
it('should write correct JSON structure with all fields', async () => {
const mockMetadata = {
status: 'CONNECTED',
error: 'Some error',
lastPing: 1234567890,
};
vi.mocked(configService.get).mockReturnValue(mockMetadata);
await service.onApplicationBootstrap();
const writeCall = writeFileMock.mock.calls[0];
const writtenData = JSON.parse(writeCall[1] as string);
expect(writtenData).toMatchObject({
connectionStatus: 'CONNECTED',
error: 'Some error',
lastPing: 1234567890,
allowedOrigins: '',
});
expect(writtenData.timestamp).toBeDefined();
expect(typeof writtenData.timestamp).toBe('number');
});
it('should handle missing connection metadata', async () => {
vi.mocked(configService.get).mockReturnValue(undefined);
await service.onApplicationBootstrap();
const writeCall = writeFileMock.mock.calls[0];
const writtenData = JSON.parse(writeCall[1] as string);
expect(writtenData).toMatchObject({
connectionStatus: 'PRE_INIT',
error: null,
lastPing: null,
allowedOrigins: '',
});
});
});
describe('error handling', () => {
it('should handle write errors gracefully', async () => {
writeFileMock.mockRejectedValue(new Error('Write failed'));
await expect(service.onApplicationBootstrap()).resolves.not.toThrow();
// Test direct write error handling
await expect(service['writeStatus']()).resolves.not.toThrow();
});
});
describe('cleanup on shutdown', () => {
it('should delete status file on module destroy', async () => {
await service.onModuleDestroy();
expect(unlinkMock).toHaveBeenCalledTimes(1);
expect(unlinkMock).toHaveBeenCalledWith('/var/local/emhttp/connectStatus.json');
});
it('should handle file deletion errors gracefully', async () => {
unlinkMock.mockRejectedValue(new Error('File not found'));
await expect(service.onModuleDestroy()).resolves.not.toThrow();
expect(unlinkMock).toHaveBeenCalledTimes(1);
});
it('should ensure file is deleted even if it was never written', async () => {
// Don't bootstrap (so no file is written)
await service.onModuleDestroy();
expect(unlinkMock).toHaveBeenCalledTimes(1);
expect(unlinkMock).toHaveBeenCalledWith('/var/local/emhttp/connectStatus.json');
});
});
});
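These unit tests invoke the private writeStatus() method directly because the @OnEvent binding only exists once the event-emitter infrastructure is bootstrapped. A minimal sketch of driving the real event path instead, assuming @nestjs/testing, EventEmitterModule from @nestjs/event-emitter, and the EVENTS token the service imports:

import { Test } from '@nestjs/testing';
import { ConfigService } from '@nestjs/config';
import { EventEmitter2, EventEmitterModule } from '@nestjs/event-emitter';
import { EVENTS } from '../helper/nest-tokens.js';
import { ConnectStatusWriterService } from './connect-status-writer.service.js';

// Compile a testing module so the @OnEvent listener is actually registered.
const moduleRef = await Test.createTestingModule({
imports: [EventEmitterModule.forRoot()],
providers: [
ConnectStatusWriterService,
{ provide: ConfigService, useValue: configService },
],
}).compile();
await moduleRef.init(); // binds @OnEvent handlers
// Emitting the token now reaches writeStatus() through the real event path.
moduleRef.get(EventEmitter2).emit(EVENTS.MOTHERSHIP_CONNECTION_STATUS_CHANGED);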

View File

@@ -0,0 +1,69 @@
import { Injectable, Logger, OnApplicationBootstrap, OnModuleDestroy } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { OnEvent } from '@nestjs/event-emitter';
import { unlink, writeFile } from 'fs/promises';
import { ConfigType, ConnectionMetadata } from '../config/connect.config.js';
import { EVENTS } from '../helper/nest-tokens.js';
@Injectable()
export class ConnectStatusWriterService implements OnApplicationBootstrap, OnModuleDestroy {
constructor(private readonly configService: ConfigService<ConfigType, true>) {}
private logger = new Logger(ConnectStatusWriterService.name);
get statusFilePath() {
// Write to /var/local/emhttp/connectStatus.json so PHP can read it
return '/var/local/emhttp/connectStatus.json';
}
async onApplicationBootstrap() {
this.logger.verbose(`Status file path: ${this.statusFilePath}`);
// Write initial status
await this.writeStatus();
}
async onModuleDestroy() {
try {
await unlink(this.statusFilePath);
this.logger.verbose(`Status file deleted: ${this.statusFilePath}`);
} catch (error) {
this.logger.debug(`Could not delete status file: ${error}`);
}
}
@OnEvent(EVENTS.MOTHERSHIP_CONNECTION_STATUS_CHANGED, { async: true })
private async writeStatus() {
try {
const connectionMetadata = this.configService.get<ConnectionMetadata>('connect.mothership');
// Try to get allowed origins from the store
let allowedOrigins = '';
try {
// We can't import from @app here, so we'll skip allowed origins for now
// This can be added later if needed
allowedOrigins = '';
} catch (error) {
this.logger.debug('Could not get allowed origins:', error);
}
const statusData = {
connectionStatus: connectionMetadata?.status || 'PRE_INIT',
error: connectionMetadata?.error || null,
lastPing: connectionMetadata?.lastPing || null,
allowedOrigins: allowedOrigins,
timestamp: Date.now(),
};
const data = JSON.stringify(statusData, null, 2);
this.logger.verbose(`Writing connection status: ${data}`);
await writeFile(this.statusFilePath, data);
this.logger.verbose(`Status written to ${this.statusFilePath}`);
} catch (error) {
this.logger.error(error, `Error writing status to '${this.statusFilePath}'`);
}
}
}
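The writer above is deliberately payload-free: the @OnEvent handler re-reads connect.mothership from the ConfigService on every event. A minimal producer sketch under that contract (the class name here is hypothetical; ConnectionService plays this role elsewhere in the diff):

import { Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { EventEmitter2 } from '@nestjs/event-emitter';
import { EVENTS } from '../helper/nest-tokens.js';

@Injectable()
export class ExampleConnectionStateProducer {
constructor(
private readonly configService: ConfigService,
private readonly eventEmitter: EventEmitter2
) {}

setConnected(): void {
// Persist the state first; the writer reads it back via configService.get().
this.configService.set('connect.mothership', {
status: 'CONNECTED',
error: null,
lastPing: Date.now(),
});
this.eventEmitter.emit(EVENTS.MOTHERSHIP_CONNECTION_STATUS_CHANGED);
}
}

This mirrors what the config-behavior tests above simulate with their hand-rolled ConfigService mock.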

View File

@@ -32,7 +32,7 @@ export class MothershipHandler {
const state = this.connectionService.getConnectionState();
if (
state &&
-[MinigraphStatus.PING_FAILURE, MinigraphStatus.ERROR_RETRYING].includes(state.status)
+[MinigraphStatus.PING_FAILURE].includes(state.status)
) {
this.logger.verbose(
'Mothership connection status changed to %s; setting up mothership subscription',
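In effect the guard narrows to a single status. A minimal sketch of the new condition (setupSubscription is a hypothetical stand-in for the setup code this handler runs):

const state = this.connectionService.getConnectionState();
if (state && [MinigraphStatus.PING_FAILURE].includes(state.status)) {
// ERROR_RETRYING no longer re-triggers setup; only ping failures do.
this.setupSubscription();
}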

View File

@@ -3,18 +3,20 @@ import { Module } from '@nestjs/common';
import { ConnectApiKeyService } from '../authn/connect-api-key.service.js';
import { CloudResolver } from '../connection-status/cloud.resolver.js';
import { CloudService } from '../connection-status/cloud.service.js';
+import { ConnectStatusWriterService } from '../connection-status/connect-status-writer.service.js';
import { TimeoutCheckerJob } from '../connection-status/timeout-checker.job.js';
import { InternalClientService } from '../internal-rpc/internal.client.js';
import { RemoteAccessModule } from '../remote-access/remote-access.module.js';
import { MothershipConnectionService } from './connection.service.js';
import { MothershipGraphqlClientService } from './graphql.client.js';
import { MothershipSubscriptionHandler } from './mothership-subscription.handler.js';
-import { MothershipHandler } from './mothership.events.js';
import { MothershipController } from './mothership.controller.js';
+import { MothershipHandler } from './mothership.events.js';
@Module({
imports: [RemoteAccessModule],
providers: [
+ConnectStatusWriterService,
ConnectApiKeyService,
MothershipConnectionService,
MothershipGraphqlClientService,

View File

@@ -3,11 +3,11 @@ import { EventEmitter2 } from '@nestjs/event-emitter';
import { Args, Mutation, Query, ResolveField, Resolver } from '@nestjs/graphql';
import { type Layout } from '@jsonforms/core';
-import { GraphQLJSON } from 'graphql-scalars';
import { Resource } from '@unraid/shared/graphql.model.js';
import { DataSlice } from '@unraid/shared/jsonforms/settings.js';
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';
import { UsePermissions } from '@unraid/shared/use-permissions.directive.js';
+import { GraphQLJSON } from 'graphql-scalars';
import { AuthActionVerb, AuthPossession } from 'nest-authz';
import { EVENTS } from '../helper/nest-tokens.js';

View File

@@ -15,7 +15,7 @@
"commander": "14.0.0",
"create-create-app": "7.3.0",
"fs-extra": "11.3.0",
"inquirer": "12.6.3",
"inquirer": "12.7.0",
"validate-npm-package-name": "6.0.1"
},
"devDependencies": {
@@ -25,7 +25,7 @@
"@nestjs/graphql": "13.1.0",
"@types/fs-extra": "11.0.4",
"@types/inquirer": "9.0.8",
"@types/node": "22.15.32",
"@types/node": "22.16.3",
"@types/validate-npm-package-name": "4.0.2",
"class-transformer": "0.5.1",
"class-validator": "0.14.2",

View File

@@ -31,9 +31,9 @@
"@jsonforms/core": "3.6.0",
"@nestjs/common": "11.1.3",
"@nestjs/graphql": "13.1.0",
"@types/bun": "1.2.16",
"@types/bun": "1.2.18",
"@types/lodash-es": "4.17.12",
"@types/node": "22.15.32",
"@types/node": "22.16.3",
"class-validator": "0.14.2",
"graphql": "16.11.0",
"graphql-scalars": "1.24.2",

View File

@@ -10,6 +10,7 @@ export enum Resource {
ACTIVATION_CODE = 'ACTIVATION_CODE',
API_KEY = 'API_KEY',
ARRAY = 'ARRAY',
+BACKUP = 'BACKUP',
CLOUD = 'CLOUD',
CONFIG = 'CONFIG',
CONNECT = 'CONNECT',

View File

@@ -10,6 +10,7 @@ import { cleanupTxzFiles } from "./utils/cleanup";
import { apiDir } from "./utils/paths";
import { getVendorBundleName, getVendorFullPath } from "./build-vendor-store";
import { getAssetUrl } from "./utils/bucket-urls";
+import { ensureRclone } from "./utils/rclone-helper";
// Recursively search for manifest files

View File

@@ -1,17 +1,17 @@
{
"name": "@unraid/connect-plugin",
"version": "4.8.0",
"version": "4.9.5",
"private": true,
"dependencies": {
"commander": "14.0.0",
"conventional-changelog": "6.0.0",
"date-fns": "4.1.0",
"glob": "11.0.1",
"glob": "11.0.3",
"html-sloppy-escaper": "0.1.0",
"semver": "7.7.1",
"tsx": "4.19.3",
"zod": "3.24.2",
"zx": "8.3.2"
"semver": "7.7.2",
"tsx": "4.20.3",
"zod": "3.25.76",
"zx": "8.6.2"
},
"type": "module",
"license": "GPL-2.0-or-later",
@@ -37,7 +37,7 @@
"devDependencies": {
"http-server": "14.1.1",
"nodemon": "3.1.10",
"vitest": "3.0.7"
"vitest": "3.2.4"
},
"packageManager": "pnpm@10.12.4"
"packageManager": "pnpm@10.13.1"
}

View File

@@ -138,21 +138,94 @@ exit 0
</INLINE>
</FILE>
<FILE Run="/bin/bash" Method="install">
<INLINE>
<![CDATA[
echo "Patching header logo if necessary..."
# We do this here instead of via API FileModification to avoid undesirable
# rollback when the API is stopped.
#
# This is necessary on < 7.2 because the unraid-header-os-version web component
# that ships with the base OS only displayes the version, not the logo as well.
#
# Rolling back in this case (i.e when stopping the API) yields a duplicate logo
# that blocks interaction with the navigation menu.
# Remove the old header logo from DefaultPageLayout.php if present
if [ -f "/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php" ]; then
sed -i 's|<a href="https://unraid.net" target="_blank"><?readfile("$docroot/webGui/images/UN-logotype-gradient.svg")?></a>||g' "/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php"
# Add unraid-modals element if not already present
if ! grep -q '<unraid-modals>' "/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php"; then
sed -i 's|<body>|<body>\n<unraid-modals></unraid-modals>|' "/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php"
fi
fi
]]>
</INLINE>
</FILE>
<FILE Run="/bin/bash" Method="remove">
<INLINE>
MAINNAME="&name;"
<![CDATA[
echo "Removing Plugin"
-# Find any installed dynamix.unraid.net package
-pkg_installed=$(ls -1 /var/log/packages/dynamix.unraid.net* 2>/dev/null | head -1)
-if [ -n "$pkg_installed" ]; then
-pkg_basename=$(basename "$pkg_installed")
-echo "Removing package: $pkg_basename"
-removepkg --terse "$pkg_basename"
+# Check Unraid version
+UNRAID_VERSION=""
+is_7_2_or_higher=false
+# Check if version file exists and is readable
+if [ -f "/etc/unraid-version" ] && [ -r "/etc/unraid-version" ]; then
+UNRAID_VERSION=$(cat /etc/unraid-version | grep "^version=" | cut -d'"' -f2 2>/dev/null)
+if [ -z "$UNRAID_VERSION" ]; then
+echo "Warning: Unable to parse version from /etc/unraid-version"
+echo "Using safe removal method (plugin file removal + reboot)"
+is_7_2_or_higher=true # Default to safe method
+else
+# Check if this is Unraid 7.2 or higher (including RCs and prereleases)
+if [[ "$UNRAID_VERSION" =~ ^7\.([2-9]|[1-9][0-9]+)\. ]] || [[ "$UNRAID_VERSION" =~ ^([8-9]|[1-9][0-9]+)\. ]]; then
+is_7_2_or_higher=true
+fi
+fi
else
echo "No dynamix.unraid.net package found. Trying with basic package name."
removepkg --terse "${MAINNAME}"
echo "Warning: /etc/unraid-version file not found or not readable"
echo "Using safe removal method (plugin file removal + reboot)"
is_7_2_or_higher=true # Default to safe method
fi
if [ "$is_7_2_or_higher" = true ]; then
echo "Unraid 7.2+ detected. Using safe removal method."
# Send notification to user
/usr/local/emhttp/webGui/scripts/notify \
-e "Unraid Connect" \
-s "Reboot Required for Unraid Connect Removal" \
-d "Unraid Connect plugin has been marked for removal. Please reboot your server to complete the uninstallation." \
-i "warning"
# Remove the plugin file so it won't be installed on reboot
PLUGIN_FILE="/boot/config/plugins/${MAINNAME}.plg"
if [ -f "$PLUGIN_FILE" ]; then
echo "Removing plugin file: $PLUGIN_FILE"
rm -f "$PLUGIN_FILE"
fi
echo "Plugin marked for removal. Reboot required to complete uninstallation."
else
# Original removal method for older versions
# Find any installed dynamix.unraid.net package
pkg_installed=$(ls -1 /var/log/packages/dynamix.unraid.net* 2>/dev/null | head -1)
if [ -n "$pkg_installed" ]; then
pkg_basename=$(basename "$pkg_installed")
echo "Removing package: $pkg_basename"
removepkg --terse "$pkg_basename"
else
echo "No dynamix.unraid.net package found. Trying with basic package name."
removepkg --terse "${MAINNAME}"
fi
fi
# File restoration function
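The version gate above has to accept 7.2 and later minors, two-or-more-digit 7.x minors, and any later major, including RCs and prereleases. A sketch of the same predicate in TypeScript, mirroring the two bash patterns (the function name is hypothetical; unparseable input defaults to the safe method, as in the script):

// true => use the safe removal method (plugin file removal + reboot)
function usesSafeRemoval(version: string | null): boolean {
if (!version) return true; // could not parse /etc/unraid-version
return /^7\.([2-9]|[1-9][0-9]+)\./.test(version) // 7.2 .. 7.9, 7.10+, incl. RCs
|| /^([8-9]|[1-9][0-9]+)\./.test(version); // 8.x and later majors
}

// e.g. usesSafeRemoval('7.1.4') === false; usesSafeRemoval('7.2.0-rc1') === true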

View File

@@ -23,9 +23,16 @@ $myservers_flash_cfg_path='/boot/config/plugins/dynamix.my.servers/myservers.cfg
$myservers = file_exists($myservers_flash_cfg_path) ? @parse_ini_file($myservers_flash_cfg_path,true) : [];
$isRegistered = !empty($myservers['remote']['username']);
-$myservers_memory_cfg_path ='/var/local/emhttp/myservers.cfg';
-$mystatus = (file_exists($myservers_memory_cfg_path)) ? @parse_ini_file($myservers_memory_cfg_path) : [];
-$isConnected = (($mystatus['minigraph']??'')==='CONNECTED') ? true : false;
+// Read connection status from the new API status file
+$statusFilePath = '/var/local/emhttp/connectStatus.json';
+$connectionStatus = '';
+if (file_exists($statusFilePath)) {
+$statusData = @json_decode(file_get_contents($statusFilePath), true);
+$connectionStatus = $statusData['connectionStatus'] ?? '';
+}
+$isConnected = ($connectionStatus === 'CONNECTED') ? true : false;
$flashbackup_ini = '/var/local/emhttp/flashbackup.ini';
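The PHP readers and the API now share one contract: the JSON document written by ConnectStatusWriterService. A TypeScript sketch of its shape, taken from the statusData object in the service (nullability reflects the || null fallbacks there):

interface ConnectStatusFile {
connectionStatus: string; // 'PRE_INIT' until the API reports a real status
error: string | null;
lastPing: number | null;
allowedOrigins: string; // currently always '' (see the service's inline note)
timestamp: number; // Date.now() at write time
}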

View File

@@ -168,9 +168,8 @@ class ServerState
private function getMyServersCfgValues()
{
/**
-* @todo can we read this from somewhere other than the flash? Connect page uses this path and /boot/config/plugins/dynamix.my.servers/myservers.cfg
-* - $myservers_memory_cfg_path ='/var/local/emhttp/myservers.cfg';
-* - $mystatus = (file_exists($myservers_memory_cfg_path)) ? @parse_ini_file($myservers_memory_cfg_path) : [];
+* Memory config is now written by the new API to /usr/local/emhttp/state/myservers.cfg
+* This contains runtime state including connection status.
*/
$flashCfgPath = '/boot/config/plugins/dynamix.my.servers/myservers.cfg';
$this->myServersFlashCfg = file_exists($flashCfgPath) ? @parse_ini_file($flashCfgPath, true) : [];
@@ -212,11 +211,19 @@ class ServerState
* Include localhost in the test, but only display HTTP(S) URLs that do not include localhost.
*/
$this->host = $_SERVER['HTTP_HOST'] ?? "unknown";
-$memoryCfgPath = '/var/local/emhttp/myservers.cfg';
-$this->myServersMemoryCfg = (file_exists($memoryCfgPath)) ? @parse_ini_file($memoryCfgPath) : [];
-$this->myServersMiniGraphConnected = (($this->myServersMemoryCfg['minigraph'] ?? '') === 'CONNECTED');
+// Read connection status and allowed origins from the new API status file
+$statusFilePath = '/var/local/emhttp/connectStatus.json';
+$connectionStatus = '';
+$allowedOrigins = '';
+if (file_exists($statusFilePath)) {
+$statusData = @json_decode(file_get_contents($statusFilePath), true);
+$connectionStatus = $statusData['connectionStatus'] ?? '';
+$allowedOrigins = $statusData['allowedOrigins'] ?? '';
+}
+$this->myServersMiniGraphConnected = ($connectionStatus === 'CONNECTED');
-$allowedOrigins = $this->myServersMemoryCfg['allowedOrigins'] ?? "";
$extraOrigins = $this->myServersFlashCfg['api']['extraOrigins'] ?? "";
$combinedOrigins = $allowedOrigins . "," . $extraOrigins; // combine the two strings for easier searching
$combinedOrigins = str_replace(" ", "", $combinedOrigins); // replace any spaces with nothing

Some files were not shown because too many files have changed in this diff.