Mirror of https://github.com/unraid/api.git (synced 2026-01-02 06:30:02 -06:00)

Compare commits (44 commits):

9ef1cf1eca, a0745e15ca, c39b0b267c, 73135b8328, e42d619b6d, 560db880cc,
d6055f102b, d099e7521d, bb9b539732, 0e44e73bf7, 277ac42046, e1e3ea7eb6,
8b155d1f1c, d13a1f6174, e243ae836e, 01a63fd86b, df78608457, ca3bee4ad5,
024ae69343, 99ce88bfdc, 73b2ce360c, d6e29395c8, 317e0fa307, 331c913329,
abf3461348, 079a09ec90, e4223ab5a1, 6f54206a4a, e35bcc72f1, 74df938e45,
51f025b105, 23a71207dd, 832e9d04f2, 31af99e52f, 933cefa020, 375dcd0598,
64875edbba, 330e81a484, b8f0fdf8d2, 36c104915e, dc9a036c73, c71b0487ad,
e7340431a5, e4a9b8291b
@@ -241,4 +241,3 @@ const pinia = createTestingPinia({
 - Set initial state for focused testing
 - Test computed properties by accessing them directly
 - Verify state changes by updating the store
-
.github/workflows/build-artifacts.yml (vendored): 27 lines changed

@@ -32,13 +32,13 @@ jobs:
     name: Build API
     runs-on: ubuntu-latest
     outputs:
-      build_number: ${{ steps.buildnumber.outputs.build_number }}
+      build_number: ${{ steps.buildnumber.outputs.build_number || steps.fallback_buildnumber.outputs.build_number }}
     defaults:
       run:
         working-directory: api
     steps:
       - name: Checkout repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: ${{ inputs.ref || github.ref }}
           fetch-depth: 0
@@ -49,7 +49,7 @@ jobs:
           run_install: false

       - name: Install Node
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version-file: ".nvmrc"
           cache: 'pnpm'
@@ -81,18 +81,25 @@ jobs:

       - name: Generate build number
         id: buildnumber
+        if: github.repository == 'unraid/api'
+        continue-on-error: true
         uses: onyxmueller/build-tag-number@v1
         with:
           token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN || github.token }}
           prefix: ${{ inputs.version_override || steps.vars.outputs.PACKAGE_LOCK_VERSION }}

+      - name: Generate fallback build number
+        id: fallback_buildnumber
+        if: steps.buildnumber.outcome != 'success'
+        run: echo "build_number=${GITHUB_RUN_NUMBER}" >> $GITHUB_OUTPUT
+
       - name: Build
         run: |
           pnpm run build:release
           tar -czf deploy/unraid-api.tgz -C deploy/pack/ .

       - name: Upload tgz to Github artifacts
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v6
         with:
           name: unraid-api
           path: ${{ github.workspace }}/api/deploy/unraid-api.tgz
@@ -105,7 +112,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: ${{ inputs.ref || github.ref }}

@@ -115,7 +122,7 @@ jobs:
           run_install: false

       - name: Install Node
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version-file: ".nvmrc"
           cache: 'pnpm'
@@ -138,7 +145,7 @@ jobs:
         run: pnpm run build:wc

       - name: Upload Artifact to Github
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v6
         with:
           name: unraid-wc-ui
           path: unraid-ui/dist-wc/
@@ -151,7 +158,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: ${{ inputs.ref || github.ref }}

@@ -169,7 +176,7 @@ jobs:
           run_install: false

       - name: Install Node
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version-file: ".nvmrc"
           cache: 'pnpm'
@@ -194,7 +201,7 @@ jobs:
         run: pnpm run build

       - name: Upload build to Github artifacts
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v6
         with:
           name: unraid-wc-rich
           path: web/dist
.github/workflows/build-plugin.yml (vendored): 28 lines changed

@@ -56,7 +56,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: ${{ inputs.ref }}
           fetch-depth: 0
@@ -67,7 +67,7 @@ jobs:
           run_install: false

       - name: Install Node
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version-file: ".nvmrc"
           cache: 'pnpm'
@@ -78,7 +78,21 @@ jobs:
           GIT_SHA=$(git rev-parse --short HEAD)
           IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
           PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
-          API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
+
+          # For release builds, trust the release tag version to avoid stale checkouts
+          if [ "${{ inputs.RELEASE_CREATED }}" = "true" ] && [ -n "${{ inputs.RELEASE_TAG }}" ]; then
+            TAG_VERSION="${{ inputs.RELEASE_TAG }}"
+            TAG_VERSION="${TAG_VERSION#v}" # trim leading v if present
+
+            if [ "$TAG_VERSION" != "$PACKAGE_LOCK_VERSION" ]; then
+              echo "::warning::Release tag version ($TAG_VERSION) does not match package.json version ($PACKAGE_LOCK_VERSION). Using tag version for TXZ naming."
+            fi
+
+            API_VERSION="$TAG_VERSION"
+          else
+            API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
+          fi
+
           echo "API_VERSION=${API_VERSION}" >> $GITHUB_OUTPUT

       - name: Install dependencies
@@ -87,19 +101,19 @@ jobs:
           pnpm install --frozen-lockfile --filter @unraid/connect-plugin

       - name: Download Unraid UI Components
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@v7
         with:
           name: unraid-wc-ui
           path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/uui
           merge-multiple: true
       - name: Download Unraid Web Components
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@v7
         with:
           pattern: unraid-wc-rich
           path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/standalone
           merge-multiple: true
       - name: Download Unraid API
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@v7
         with:
           name: unraid-api
           path: ${{ github.workspace }}/plugin/api/
@@ -128,7 +142,7 @@ jobs:
           fi

       - name: Upload to GHA
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v6
         with:
           name: unraid-plugin-${{ github.run_id }}-${{ inputs.RELEASE_TAG }}
           path: plugin/deploy/
.github/workflows/codeql-analysis.yml (vendored): 8 lines changed

@@ -24,17 +24,17 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3
+        uses: github/codeql-action/init@v4
         with:
           languages: ${{ matrix.language }}
           config-file: ./.github/codeql/codeql-config.yml
           queries: +security-and-quality

       - name: Autobuild
-        uses: github/codeql-action/autobuild@v3
+        uses: github/codeql-action/autobuild@v4

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3
+        uses: github/codeql-action/analyze@v4
.github/workflows/deploy-storybook.yml (vendored): 4 lines changed

@@ -20,7 +20,7 @@ jobs:
     name: Deploy Storybook
     steps:
       - name: Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

      - uses: pnpm/action-setup@v4
        name: Install pnpm
@@ -28,7 +28,7 @@ jobs:
          run_install: false

      - name: Setup Node.js
-       uses: actions/setup-node@v5
+       uses: actions/setup-node@v6
        with:
          node-version-file: ".nvmrc"
          cache: 'pnpm'
.github/workflows/generate-release-notes.yml (vendored): 4 lines changed

@@ -31,14 +31,14 @@ jobs:
     release_notes: ${{ steps.generate_notes.outputs.release_notes }}
     steps:
       - name: Checkout repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: ${{ inputs.target_commitish || github.ref }}
           fetch-depth: 0
           token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

       - name: Setup Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v6
         with:
           node-version: '20'
.github/workflows/main.yml (vendored): 6 lines changed

@@ -23,7 +23,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           fetch-depth: 0

@@ -33,7 +33,7 @@ jobs:
           run_install: false

       - name: Install Node
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version-file: ".nvmrc"
           cache: 'pnpm'
@@ -177,7 +177,7 @@ jobs:
     pull-requests: write
     steps:
       - name: Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           fetch-depth: 0
.github/workflows/manual-release.yml (vendored): 6 lines changed

@@ -31,14 +31,14 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: ${{ inputs.target_commitish || github.ref }}
           fetch-depth: 0
           token: ${{ secrets.UNRAID_BOT_GITHUB_ADMIN_TOKEN }}

       - name: Setup Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v6
         with:
           node-version: '20'

@@ -167,7 +167,7 @@ jobs:
     release_notes: ${{ needs.generate-release-notes.outputs.release_notes }}
     steps:
       - name: Checkout repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: ${{ inputs.target_commitish || github.ref }}
           fetch-depth: 0
.github/workflows/publish-schema.yml (vendored): 2 lines changed

@@ -14,7 +14,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

       - name: Install Apollo Rover CLI
         run: |
.github/workflows/release-production.yml (vendored): 2 lines changed

@@ -28,7 +28,7 @@ jobs:
         with:
           latest: true
           prerelease: false
-      - uses: actions/setup-node@v5
+      - uses: actions/setup-node@v6
        with:
          node-version: 22.19.0
      - run: |
@@ -1 +1 @@
-{".":"4.26.2"}
+{".":"4.29.2"}
@@ -63,15 +63,6 @@
 */

 .unapi {
-  --color-alpha: #1c1b1b;
-  --color-beta: #f2f2f2;
-  --color-gamma: #999999;
-  --color-gamma-opaque: rgba(153, 153, 153, 0.5);
-  --color-customgradient-start: rgba(242, 242, 242, 0);
-  --color-customgradient-end: rgba(242, 242, 242, 0.85);
-  --shadow-beta: 0 25px 50px -12px rgba(242, 242, 242, 0.15);
-  --ring-offset-shadow: 0 0 var(--color-beta);
-  --ring-shadow: 0 0 var(--color-beta);
 }

 .unapi button:not(:disabled),
@@ -6,92 +6,63 @@

 /* Default/White Theme */
 .Theme--white {
-  --header-text-primary: #ffffff;
-  --header-text-secondary: #999999;
-  --header-background-color: #1c1b1b;
-  --header-gradient-start: rgba(28, 27, 27, 0);
-  --header-gradient-end: rgba(28, 27, 27, 0.7);
   --color-border: #383735;
   --color-alpha: #ff8c2f;
   --color-beta: #1c1b1b;
   --color-gamma: #ffffff;
   --color-gamma-opaque: rgba(255, 255, 255, 0.3);
+  --color-header-gradient-start: color-mix(in srgb, var(--header-background-color) 0%, transparent);
+  --color-header-gradient-end: color-mix(in srgb, var(--header-background-color) 100%, transparent);
+  --shadow-beta: 0 25px 50px -12px color-mix(in srgb, var(--color-beta) 15%, transparent);
+  --ring-offset-shadow: 0 0 var(--color-beta);
+  --ring-shadow: 0 0 var(--color-beta);
 }

 /* Black Theme */
 .Theme--black,
 .Theme--black.dark {
-  --header-text-primary: #1c1b1b;
-  --header-text-secondary: #999999;
-  --header-background-color: #f2f2f2;
-  --header-gradient-start: rgba(242, 242, 242, 0);
-  --header-gradient-end: rgba(242, 242, 242, 0.7);
   --color-border: #e0e0e0;
   --color-alpha: #ff8c2f;
   --color-beta: #f2f2f2;
   --color-gamma: #1c1b1b;
   --color-gamma-opaque: rgba(28, 27, 27, 0.3);
+  --color-header-gradient-start: color-mix(in srgb, var(--header-background-color) 0%, transparent);
+  --color-header-gradient-end: color-mix(in srgb, var(--header-background-color) 100%, transparent);
+  --shadow-beta: 0 25px 50px -12px color-mix(in srgb, var(--color-beta) 15%, transparent);
+  --ring-offset-shadow: 0 0 var(--color-beta);
+  --ring-shadow: 0 0 var(--color-beta);
 }

 /* Gray Theme */
-.Theme--gray {
-  --header-text-primary: #ffffff;
-  --header-text-secondary: #999999;
-  --header-background-color: #1c1b1b;
-  --header-gradient-start: rgba(28, 27, 27, 0);
-  --header-gradient-end: rgba(28, 27, 27, 0.7);
+.Theme--gray,
+.Theme--gray.dark {
   --color-border: #383735;
   --color-alpha: #ff8c2f;
   --color-beta: #383735;
   --color-gamma: #ffffff;
   --color-gamma-opaque: rgba(255, 255, 255, 0.3);
+  --color-header-gradient-start: color-mix(in srgb, var(--header-background-color) 0%, transparent);
+  --color-header-gradient-end: color-mix(in srgb, var(--header-background-color) 100%, transparent);
+  --shadow-beta: 0 25px 50px -12px color-mix(in srgb, var(--color-beta) 15%, transparent);
+  --ring-offset-shadow: 0 0 var(--color-beta);
+  --ring-shadow: 0 0 var(--color-beta);
 }

 /* Azure Theme */
 .Theme--azure {
-  --header-text-primary: #1c1b1b;
-  --header-text-secondary: #999999;
-  --header-background-color: #f2f2f2;
-  --header-gradient-start: rgba(242, 242, 242, 0);
-  --header-gradient-end: rgba(242, 242, 242, 0.7);
   --color-border: #5a8bb8;
   --color-alpha: #ff8c2f;
   --color-beta: #e7f2f8;
   --color-gamma: #336699;
   --color-gamma-opaque: rgba(51, 102, 153, 0.3);
+  --color-header-gradient-start: color-mix(in srgb, var(--header-background-color) 0%, transparent);
+  --color-header-gradient-end: color-mix(in srgb, var(--header-background-color) 100%, transparent);
+  --shadow-beta: 0 25px 50px -12px color-mix(in srgb, var(--color-beta) 15%, transparent);
+  --ring-offset-shadow: 0 0 var(--color-beta);
+  --ring-shadow: 0 0 var(--color-beta);
 }

 /* Dark Mode Overrides */
 .dark {
   --color-border: #383735;
 }

-/*
- * Dynamic color variables for user overrides from GraphQL
- * These are set via JavaScript and override the theme defaults
- * Using :root with class for higher specificity to override theme classes
- */
-:root.has-custom-header-text {
-  --header-text-primary: var(--custom-header-text-primary);
-  --color-header-text-primary: var(--custom-header-text-primary);
-}
-
-:root.has-custom-header-meta {
-  --header-text-secondary: var(--custom-header-text-secondary);
-  --color-header-text-secondary: var(--custom-header-text-secondary);
-}
-
-:root.has-custom-header-bg,
-.has-custom-header-bg.Theme--black,
-.has-custom-header-bg.Theme--black.dark,
-.has-custom-header-bg.Theme--white,
-.has-custom-header-bg.Theme--white.dark,
-.has-custom-header-bg.Theme--gray,
-.has-custom-header-bg.Theme--azure {
-  --header-background-color: var(--custom-header-background-color);
-  --color-header-background: var(--custom-header-background-color);
-  --header-gradient-start: var(--custom-header-gradient-start);
-  --header-gradient-end: var(--custom-header-gradient-end);
-  --color-header-gradient-start: var(--custom-header-gradient-start);
-  --color-header-gradient-end: var(--custom-header-gradient-end);
-}
@@ -19,6 +19,7 @@ PATHS_LOGS_FILE=./dev/log/graphql-api.log
 PATHS_CONNECT_STATUS_FILE_PATH=./dev/connectStatus.json # Connect plugin status file
 PATHS_OIDC_JSON=./dev/configs/oidc.local.json
 PATHS_LOCAL_SESSION_FILE=./dev/local-session
+PATHS_DOCKER_TEMPLATES=./dev/docker-templates
 ENVIRONMENT="development"
 NODE_ENV="development"
 PORT="3001"
@@ -3,3 +3,4 @@ NODE_ENV="production"
 PORT="/var/run/unraid-api.sock"
 MOTHERSHIP_GRAPHQL_LINK="https://mothership.unraid.net/ws"
 PATHS_CONFIG_MODULES="/boot/config/plugins/dynamix.my.servers/configs"
+ENABLE_NEXT_DOCKER_RELEASE=true
@@ -3,3 +3,4 @@ NODE_ENV="production"
 PORT="/var/run/unraid-api.sock"
 MOTHERSHIP_GRAPHQL_LINK="https://staging.mothership.unraid.net/ws"
 PATHS_CONFIG_MODULES="/boot/config/plugins/dynamix.my.servers/configs"
+ENABLE_NEXT_DOCKER_RELEASE=true
@@ -8,7 +8,7 @@ export default tseslint.config(
   eslint.configs.recommended,
   ...tseslint.configs.recommended,
   {
-    ignores: ['src/graphql/generated/client/**/*', 'src/**/**/dummy-process.js'],
+    ignores: ['src/graphql/generated/client/**/*', 'src/**/**/dummy-process.js', 'dist/**/*'],
   },
   {
     plugins: {
api/.gitignore (vendored): 6 lines changed

@@ -83,6 +83,8 @@ deploy/*

 !**/*.login.*

+# Local Development Artifacts
+
 # local api configs - don't need project-wide tracking
 dev/connectStatus.json
 dev/configs/*
@@ -96,3 +98,7 @@ dev/configs/oidc.local.json

 # local api keys
 dev/keys/*
+# mock docker templates
+dev/docker-templates
+# ie unraid notifications
+dev/notifications

@@ -5,3 +5,4 @@ src/unraid-api/unraid-file-modifier/modifications/__fixtures__/downloaded/*

 # Generated Types
 src/graphql/generated/client/*.ts
+dist/
@@ -1,5 +1,92 @@
 # Changelog

+## [4.29.2](https://github.com/unraid/api/compare/v4.29.1...v4.29.2) (2025-12-19)
+
+### Bug Fixes
+
+* unraid-connect plugin not loaded when connect is installed ([#1856](https://github.com/unraid/api/issues/1856)) ([73135b8](https://github.com/unraid/api/commit/73135b832801f5c76d60020161492e4770958c3d))
+
+## [4.29.1](https://github.com/unraid/api/compare/v4.29.0...v4.29.1) (2025-12-19)
+
+### Bug Fixes
+
+* revert replace docker overview table with web component (7.3+) ([#1853](https://github.com/unraid/api/issues/1853)) ([560db88](https://github.com/unraid/api/commit/560db880cc138324f9ff8753f7209b683a84c045))
+
+## [4.29.0](https://github.com/unraid/api/compare/v4.28.2...v4.29.0) (2025-12-19)
+
+### Features
+
+* replace docker overview table with web component (7.3+) ([#1764](https://github.com/unraid/api/issues/1764)) ([277ac42](https://github.com/unraid/api/commit/277ac420464379e7ee6739c4530271caf7717503))
+
+### Bug Fixes
+
+* handle race condition between guid loading and license check ([#1847](https://github.com/unraid/api/issues/1847)) ([8b155d1](https://github.com/unraid/api/commit/8b155d1f1c99bb19efbc9614e000d852e9f0c12d))
+* resolve issue with "Continue" button when updating ([#1852](https://github.com/unraid/api/issues/1852)) ([d099e75](https://github.com/unraid/api/commit/d099e7521d2062bb9cf84f340e46b169dd2492c5))
+* update myservers config references to connect config references ([#1810](https://github.com/unraid/api/issues/1810)) ([e1e3ea7](https://github.com/unraid/api/commit/e1e3ea7eb68cc6840f67a8aec937fd3740e75b28))
+
+## [4.28.2](https://github.com/unraid/api/compare/v4.28.1...v4.28.2) (2025-12-16)
+
+### Bug Fixes
+
+* **api:** timeout on startup on 7.0 and 6.12 ([#1844](https://github.com/unraid/api/issues/1844)) ([e243ae8](https://github.com/unraid/api/commit/e243ae836ec1a7fde37dceeb106cc693b20ec82b))
+
+## [4.28.1](https://github.com/unraid/api/compare/v4.28.0...v4.28.1) (2025-12-16)
+
+### Bug Fixes
+
+* empty commit to release as 4.28.1 ([df78608](https://github.com/unraid/api/commit/df786084572eefb82e086c15939b50cc08b9db10))
+
+## [4.28.0](https://github.com/unraid/api/compare/v4.27.2...v4.28.0) (2025-12-15)
+
+### Features
+
+* when cancelling OS upgrade, delete any plugin files that were d… ([#1823](https://github.com/unraid/api/issues/1823)) ([74df938](https://github.com/unraid/api/commit/74df938e450def2ee3e2864d4b928f53a68e9eb8))
+
+### Bug Fixes
+
+* change keyfile watcher to poll instead of inotify on FAT32 ([#1820](https://github.com/unraid/api/issues/1820)) ([23a7120](https://github.com/unraid/api/commit/23a71207ddde221867562b722f4e65a5fc4dd744))
+* enhance dark mode support in theme handling ([#1808](https://github.com/unraid/api/issues/1808)) ([d6e2939](https://github.com/unraid/api/commit/d6e29395c8a8b0215d4f5945775de7fa358d06ec))
+* improve API startup reliability with timeout budget tracking ([#1824](https://github.com/unraid/api/issues/1824)) ([51f025b](https://github.com/unraid/api/commit/51f025b105487b178048afaabf46b260c4a7f9c1))
+* PHP Warnings in Management Settings ([#1805](https://github.com/unraid/api/issues/1805)) ([832e9d0](https://github.com/unraid/api/commit/832e9d04f207d3ec612c98500a2ffc86659264e5))
+* **plg:** explicitly stop an existing api before installation ([#1841](https://github.com/unraid/api/issues/1841)) ([99ce88b](https://github.com/unraid/api/commit/99ce88bfdc0a7f020c42f2fe0c6a0f4e32ac8f5a))
+* update @unraid/shared-callbacks to version 3.0.0 ([#1831](https://github.com/unraid/api/issues/1831)) ([73b2ce3](https://github.com/unraid/api/commit/73b2ce360c66cd9bedc138a5f8306af04b6bde77))
+* **ups:** convert estimatedRuntime from minutes to seconds ([#1822](https://github.com/unraid/api/issues/1822)) ([024ae69](https://github.com/unraid/api/commit/024ae69343bad5a3cbc19f80e357082e9b2efc1e))
+
+## [4.27.2](https://github.com/unraid/api/compare/v4.27.1...v4.27.2) (2025-11-21)
+
+### Bug Fixes
+
+* issue with header flashing + issue with trial date ([64875ed](https://github.com/unraid/api/commit/64875edbba786a0d1ba0113c9e9a3d38594eafcc))
+
+## [4.27.1](https://github.com/unraid/api/compare/v4.27.0...v4.27.1) (2025-11-21)
+
+### Bug Fixes
+
+* missing translations for expiring trials ([#1800](https://github.com/unraid/api/issues/1800)) ([36c1049](https://github.com/unraid/api/commit/36c104915ece203a3cac9e1a13e0c325e536a839))
+* resolve header flash when background color is set ([#1796](https://github.com/unraid/api/issues/1796)) ([dc9a036](https://github.com/unraid/api/commit/dc9a036c73d8ba110029364e0d044dc24c7d0dfa))
+
+## [4.27.0](https://github.com/unraid/api/compare/v4.26.2...v4.27.0) (2025-11-19)
+
+### Features
+
+* remove Unraid API log download functionality ([#1793](https://github.com/unraid/api/issues/1793)) ([e4a9b82](https://github.com/unraid/api/commit/e4a9b8291b049752a9ff59b17ff50cf464fe0535))
+
+### Bug Fixes
+
+* auto-uninstallation of connect api plugin ([#1791](https://github.com/unraid/api/issues/1791)) ([e734043](https://github.com/unraid/api/commit/e7340431a58821ec1b4f5d1b452fba6613b01fa5))
+
 ## [4.26.2](https://github.com/unraid/api/compare/v4.26.1...v4.26.2) (2025-11-19)
@@ -75,6 +75,16 @@ If you found this file you're likely a developer. If you'd like to know more abo

 - Run `pnpm --filter @unraid/api i18n:extract` to scan the Nest.js source for translation helper usages and update `src/i18n/en.json` with any new keys. The extractor keeps existing translations intact and appends new keys with their English source text.

+## Developer Documentation
+
+For detailed information about specific features:
+
+- [API Plugins](docs/developer/api-plugins.md) - Working with API plugins and workspace packages
+- [Docker Feature](docs/developer/docker.md) - Container management, GraphQL API, and WebGUI integration
+- [Feature Flags](docs/developer/feature-flags.md) - Conditionally enabling functionality
+- [Repository Organization](docs/developer/repo-organization.md) - Codebase structure
+- [Development Workflows](docs/developer/workflows.md) - Development processes
+
 ## License

 Copyright Lime Technology Inc. All rights reserved.
@@ -1,5 +1,5 @@
 {
-  "version": "4.25.3",
+  "version": "4.29.2",
   "extraOrigins": [],
   "sandbox": true,
   "ssoSubIds": [],
api/docs/developer/docker.md (new file, +555 lines):

# Docker Feature

The Docker feature provides complete container management for Unraid through a GraphQL API, including lifecycle operations, real-time monitoring, update detection, and organizational tools.

## Table of Contents

- [Overview](#overview)
- [Architecture](#architecture)
  - [Module Structure](#module-structure)
  - [Data Flow](#data-flow)
- [Core Services](#core-services)
  - [DockerService](#dockerservice)
  - [DockerNetworkService](#dockernetworkservice)
  - [DockerPortService](#dockerportservice)
  - [DockerLogService](#dockerlogservice)
  - [DockerStatsService](#dockerstatsservice)
  - [DockerAutostartService](#dockerautostartservice)
  - [DockerConfigService](#dockerconfigservice)
  - [DockerManifestService](#dockermanifestservice)
  - [DockerPhpService](#dockerphpservice)
  - [DockerTailscaleService](#dockertailscaleservice)
  - [DockerTemplateScannerService](#dockertemplatescannerservice)
  - [DockerOrganizerService](#dockerorganizerservice)
- [GraphQL API](#graphql-api)
  - [Queries](#queries)
  - [Mutations](#mutations)
  - [Subscriptions](#subscriptions)
- [Data Models](#data-models)
  - [DockerContainer](#dockercontainer)
  - [ContainerState](#containerstate)
  - [ContainerPort](#containerport)
  - [DockerPortConflicts](#dockerportconflicts)
- [Caching Strategy](#caching-strategy)
- [WebGUI Integration](#webgui-integration)
  - [File Modification](#file-modification)
  - [PHP Integration](#php-integration)
- [Permissions](#permissions)
- [Configuration Files](#configuration-files)
- [Development](#development)
  - [Adding a New Docker Service](#adding-a-new-docker-service)
  - [Testing](#testing)
  - [Feature Flag Testing](#feature-flag-testing)

## Overview

**Location:** `src/unraid-api/graph/resolvers/docker/`

**Feature Flag:** Many next-generation features are gated behind `ENABLE_NEXT_DOCKER_RELEASE`. See [Feature Flags](./feature-flags.md) for details on enabling.

**Key Capabilities:**

- Container lifecycle management (start, stop, pause, update, remove)
- Real-time container stats streaming
- Network and port conflict detection
- Container log retrieval
- Automatic update detection via digest comparison
- Tailscale container integration
- Container organization with folders and views
- Template-based metadata resolution

## Architecture

### Module Structure

The Docker module (`docker.module.ts`) serves as the entry point and exports:

- **13 services** for various Docker operations
- **3 resolvers** for GraphQL query/mutation/subscription handling

**Dependencies:**

- `JobModule` - Background job scheduling
- `NotificationsModule` - User notifications
- `ServicesModule` - Shared service utilities

### Data Flow

```text
Docker Daemon (Unix Socket)
          ↓
    dockerode library
          ↓
DockerService (transform & cache)
          ↓
    GraphQL Resolvers
          ↓
   Client Applications
```

The API communicates with the Docker daemon through the `dockerode` library via Unix socket. Container data is transformed from raw Docker API format to GraphQL types, enriched with Unraid-specific metadata (templates, autostart config), and cached for performance.
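For orientation, here is a minimal sketch of the first hop in that pipeline. It is illustrative only: the socket path is assumed to be the Docker default, and the real `DockerService` layers caching and template enrichment on top of this call.

```typescript
import Docker from 'dockerode';

// Assumed default daemon socket; not taken from the actual service config.
const docker = new Docker({ socketPath: '/var/run/docker.sock' });

async function listContainers() {
    // all: true also returns stopped containers.
    const raw = await docker.listContainers({ all: true });
    // First part of the "transform" stage: raw Docker fields -> model-like shapes.
    return raw.map((c) => ({
        id: c.Id,
        names: c.Names,
        image: c.Image,
        state: c.State, // e.g. "running", "paused", "exited"
        status: c.Status, // human-readable, e.g. "Up 2 hours"
    }));
}
```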
## Core Services

### DockerService

**File:** `docker.service.ts`

Central orchestrator for all container operations.

**Key Methods:**

- `getContainers(skipCache?, includeSize?)` - List containers with caching
- `start(id)`, `stop(id)`, `pause(id)`, `unpause(id)` - Lifecycle operations
- `updateContainer(id)`, `updateContainers(ids)`, `updateAllContainers()` - Image updates
- `removeContainer(id, withImage?)` - Remove container and optionally its image

**Caching:**

- Cache TTL: 60 seconds (60000ms)
- Cache keys: `docker_containers`, `docker_containers_with_size`
- Invalidated automatically on mutations

### DockerNetworkService

**File:** `docker-network.service.ts`

Lists Docker networks with metadata including driver, scope, IPAM settings, and connected containers.

**Caching:** 60 seconds

### DockerPortService

**File:** `docker-port.service.ts`

Detects port conflicts between containers and with the host.

**Features:**

- Deduplicates port mappings from Docker API
- Identifies container-to-container conflicts (see the sketch after this list)
- Detects host-level port collisions
- Separates TCP and UDP conflicts
- Calculates LAN-accessible IP:port combinations
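A rough sketch of the container-to-container check, assuming flattened port bindings; the field names mirror the `ContainerPort` model, but the helper itself is hypothetical:

```typescript
interface PortBinding {
    containerId: string;
    containerName: string;
    publicPort?: number;
    type: 'tcp' | 'udp';
}

// Group published ports by "port/protocol"; any key claimed by more than
// one container is a conflict. Unpublished ports cannot conflict.
function findContainerConflicts(bindings: PortBinding[]): Map<string, PortBinding[]> {
    const byKey = new Map<string, PortBinding[]>();
    for (const b of bindings) {
        if (b.publicPort === undefined) continue;
        const key = `${b.publicPort}/${b.type}`;
        byKey.set(key, [...(byKey.get(key) ?? []), b]);
    }
    return new Map([...byKey].filter(([, list]) => list.length > 1));
}
```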
### DockerLogService

**File:** `docker-log.service.ts`

Retrieves container logs with configurable options.

**Parameters:**

- `tail` - Number of lines (default: 200, max: 2000; see the clamping sketch after this list)
- `since` - Timestamp filter for log entries

**Additional Features:**

- Calculates container log file sizes
- Supports timestamp-based filtering
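The tail limits reduce to a small clamp; this helper is hypothetical but matches the documented bounds:

```typescript
const DEFAULT_TAIL = 200;
const MAX_TAIL = 2000;

// Undefined or invalid input falls back to the default of 200 lines;
// anything else is clamped to the 1..2000 range.
function clampTail(tail?: number): number {
    if (tail === undefined || !Number.isFinite(tail)) return DEFAULT_TAIL;
    return Math.min(Math.max(1, Math.floor(tail)), MAX_TAIL);
}
```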
### DockerStatsService

**File:** `docker-stats.service.ts`

Provides real-time container statistics via GraphQL subscription.

**Metrics:**

- CPU percentage
- Memory usage and limit
- Network I/O (received/transmitted bytes)
- Block I/O (read/written bytes)

**Implementation:**

- Spawns `docker stats` process with streaming output (sketched after this list)
- Publishes to `PUBSUB_CHANNEL.DOCKER_STATS`
- Auto-starts on first subscriber, stops when last disconnects
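A minimal sketch of the spawning approach, assuming `docker stats --format '{{json .}}'` output and a `publish` callback standing in for the pubsub publisher:

```typescript
import { spawn } from 'node:child_process';

function streamDockerStats(publish: (stats: unknown) => void): () => void {
    // --format '{{json .}}' emits one JSON object per container per refresh.
    const proc = spawn('docker', ['stats', '--format', '{{json .}}']);
    proc.stdout.setEncoding('utf8');
    proc.stdout.on('data', (chunk: string) => {
        for (const line of chunk.split('\n')) {
            if (!line.trim()) continue;
            try {
                publish(JSON.parse(line));
            } catch {
                // Partial line across chunk boundaries; a real implementation buffers.
            }
        }
    });
    // Caller invokes this when the last subscriber disconnects.
    return () => proc.kill();
}
```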
### DockerAutostartService

**File:** `docker-autostart.service.ts`

Manages container auto-start configuration.

**Features:**

- Parses auto-start file format (name + wait time per line; see the parsing sketch after this list)
- Maintains auto-start order and wait times
- Persists configuration changes
- Tracks container primary names
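A parsing sketch for the documented file format (one container name per line, optionally followed by a wait time in seconds); the entry shape is an assumption:

```typescript
interface AutostartEntry {
    name: string;
    wait?: number;
}

function parseAutostartFile(contents: string): AutostartEntry[] {
    return contents
        .split('\n')
        .map((line) => line.trim())
        .filter(Boolean)
        .map((line) => {
            // "myContainer 30" -> { name: "myContainer", wait: 30 }
            const [name, wait] = line.split(/\s+/);
            return wait ? { name, wait: Number(wait) } : { name };
        });
}
```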
### DockerConfigService

**File:** `docker-config.service.ts`

Persistent configuration management using `ConfigFilePersister`.

**Configuration Options:**

- `templateMappings` - Container name to template file path mappings
- `skipTemplatePaths` - Containers excluded from template scanning
- `updateCheckCronSchedule` - Cron expression for digest refresh (default: daily at 6am)

### DockerManifestService

**File:** `docker-manifest.service.ts`

Detects available container image updates.

**Implementation:**

- Compares local and remote image SHA256 digests (see the sketch after this list)
- Reads cached status from `/var/lib/docker/unraid-update-status.json`
- Triggers refresh via PHP integration
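A sketch of the digest comparison, assuming the status file maps image names to local/remote digest pairs (the exact file shape is not documented here):

```typescript
import { readFile } from 'node:fs/promises';

const STATUS_FILE = '/var/lib/docker/unraid-update-status.json';

// Returns true if an update is available, false if current,
// and null when the status cannot be determined (UNKNOWN).
async function isUpdateAvailable(image: string): Promise<boolean | null> {
    const raw = await readFile(STATUS_FILE, 'utf8');
    const status = JSON.parse(raw) as Record<string, { local?: string; remote?: string }>;
    const entry = status[image];
    if (!entry?.local || !entry.remote) return null;
    return entry.local !== entry.remote; // differing SHA256 digests => update
}
```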
### DockerPhpService

**File:** `docker-php.service.ts`

Integration with legacy Unraid PHP Docker scripts.

**PHP Scripts Used:**

- `DockerUpdate.php` - Refresh container digests
- `DockerContainers.php` - Get update statuses

**Update Statuses:**

- `UP_TO_DATE` - Container is current
- `UPDATE_AVAILABLE` - New image available
- `REBUILD_READY` - Rebuild required
- `UNKNOWN` - Status could not be determined

### DockerTailscaleService

**File:** `docker-tailscale.service.ts`

Detects and monitors Tailscale-enabled containers.

**Detection Methods:**

- Container labels indicating Tailscale
- Tailscale socket mount points

**Status Information:**

- Tailscale version and backend state
- Hostname and DNS name
- Exit node status
- Key expiry dates

**Caching:**

- Status cache: 30 seconds
- DERP map and versions: 24 hours

### DockerTemplateScannerService

**File:** `docker-template-scanner.service.ts`

Maps containers to their template files for metadata resolution.

**Bootstrap Process:**

1. Runs 5 seconds after app startup
2. Scans XML templates from configured paths
3. Parses container/image names from XML
4. Matches against running containers
5. Stores mappings in `docker.config.json`

**Template Metadata Resolved:**

- `projectUrl`, `registryUrl`, `supportUrl`
- `iconUrl`, `webUiUrl`, `shell`
- Template port mappings

**Orphaned Containers:**

Containers without matching templates are marked as "orphaned" in the API response.

### DockerOrganizerService

**File:** `organizer/docker-organizer.service.ts`

Container organization system for UI views.

**Features:**

- Hierarchical folder structure
- Multiple views with different layouts
- Position-based organization
- View-specific preferences (sorting, filtering)

## GraphQL API

### Queries

```graphql
type Query {
  docker: Docker!
}

type Docker {
  containers(skipCache: Boolean): [DockerContainer!]!
  container(id: PrefixedID!): DockerContainer # Feature-flagged
  networks(skipCache: Boolean): [DockerNetwork!]!
  portConflicts(skipCache: Boolean): DockerPortConflicts!
  logs(id: PrefixedID!, since: Int, tail: Int): DockerContainerLogs!
  organizer(skipCache: Boolean): DockerOrganizer! # Feature-flagged
  containerUpdateStatuses: [ContainerUpdateStatus!]! # Feature-flagged
}
```

### Mutations

**Container Lifecycle:**

```graphql
type Mutation {
  start(id: PrefixedID!): DockerContainer!
  stop(id: PrefixedID!): DockerContainer!
  pause(id: PrefixedID!): DockerContainer!
  unpause(id: PrefixedID!): DockerContainer!
  removeContainer(id: PrefixedID!, withImage: Boolean): Boolean!
}
```

**Container Updates:**

```graphql
type Mutation {
  updateContainer(id: PrefixedID!): DockerContainer!
  updateContainers(ids: [PrefixedID!]!): [DockerContainer!]!
  updateAllContainers: [DockerContainer!]!
  refreshDockerDigests: Boolean!
}
```

**Configuration:**

```graphql
type Mutation {
  updateAutostartConfiguration(
    entries: [AutostartEntry!]!
    persistUserPreferences: Boolean
  ): Boolean!
  syncDockerTemplatePaths: Boolean!
  resetDockerTemplateMappings: Boolean!
}
```

**Organizer (Feature-flagged):**

```graphql
type Mutation {
  createDockerFolder(name: String!, parentId: ID, childrenIds: [ID!]): DockerFolder!
  createDockerFolderWithItems(
    name: String!
    parentId: ID
    sourceEntryIds: [ID!]
    position: Int
  ): DockerFolder!
  setDockerFolderChildren(folderId: ID!, childrenIds: [ID!]!): DockerFolder!
  deleteDockerEntries(entryIds: [ID!]!): Boolean!
  moveDockerEntriesToFolder(sourceEntryIds: [ID!]!, destinationFolderId: ID!): Boolean!
  moveDockerItemsToPosition(
    sourceEntryIds: [ID!]!
    destinationFolderId: ID!
    position: Int!
  ): Boolean!
  renameDockerFolder(folderId: ID!, newName: String!): DockerFolder!
  updateDockerViewPreferences(viewId: ID!, prefs: ViewPreferencesInput!): Boolean!
}
```

### Subscriptions

```graphql
type Subscription {
  dockerContainerStats: DockerContainerStats!
}
```

Real-time container statistics stream. Automatically starts when the first client subscribes and stops when the last client disconnects.

## Data Models

### DockerContainer

Primary container representation with 24+ fields:

```typescript
{
  id: PrefixedID
  names: [String!]!
  image: String!
  imageId: String!
  state: ContainerState!
  status: String!
  created: Float!

  // Networking
  ports: [ContainerPort!]!
  lanIpPorts: [ContainerPort!]!
  hostConfig: ContainerHostConfig
  networkSettings: DockerNetworkSettings

  // Storage
  sizeRootFs: Float
  sizeRw: Float
  sizeLog: Float
  mounts: [ContainerMount!]!

  // Metadata
  labels: JSON

  // Auto-start
  autoStart: Boolean!
  autoStartOrder: Int
  autoStartWait: Int

  // Template Integration
  templatePath: String
  isOrphaned: Boolean!
  projectUrl: String
  registryUrl: String
  supportUrl: String
  iconUrl: String
  webUiUrl: String
  shell: String
  templatePorts: [ContainerPort!]

  // Tailscale
  tailscaleEnabled: Boolean!
  tailscaleStatus: TailscaleStatus

  // Updates
  isUpdateAvailable: Boolean
  isRebuildReady: Boolean
}
```

### ContainerState

```typescript
enum ContainerState {
  RUNNING
  PAUSED
  EXITED
}
```

### ContainerPort

```typescript
{
  ip: String
  privatePort: Int!
  publicPort: Int
  type: String! // "tcp" or "udp"
}
```

### DockerPortConflicts

```typescript
{
  containerConflicts: [DockerContainerPortConflict!]!
  lanConflicts: [DockerLanPortConflict!]!
}
```

## Caching Strategy

The Docker feature uses `cache-manager` v7 for performance optimization.

**Important:** cache-manager v7 expects TTL values in **milliseconds**, not seconds (see the sketch after the invalidation list below).

| Cache Key | TTL | Invalidation |
|-----------|-----|--------------|
| `docker_containers` | 60s | On any container mutation |
| `docker_containers_with_size` | 60s | On any container mutation |
| `docker_networks` | 60s | On network changes |
| Tailscale status | 30s | Automatic |
| Tailscale DERP/versions | 24h | Automatic |

**Cache Invalidation Triggers:**

- `start()`, `stop()`, `pause()`, `unpause()`
- `updateContainer()`, `updateContainers()`, `updateAllContainers()`
- `removeContainer()`
- `updateAutostartConfiguration()`
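A sketch of the millisecond convention with a NestJS-injected cache; the class is illustrative, not the actual `DockerService`:

```typescript
import { Inject, Injectable } from '@nestjs/common';
import { CACHE_MANAGER } from '@nestjs/cache-manager';
import type { Cache } from 'cache-manager';

@Injectable()
export class ContainerCacheExample {
    constructor(@Inject(CACHE_MANAGER) private readonly cache: Cache) {}

    async store(containers: unknown[]): Promise<void> {
        // Third argument is the TTL in milliseconds: 60_000, not 60.
        await this.cache.set('docker_containers', containers, 60_000);
    }

    async invalidate(): Promise<void> {
        // Called from every mutation listed above.
        await this.cache.del('docker_containers');
    }
}
```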
## WebGUI Integration

### File Modification

**File:** `unraid-file-modifier/modifications/docker-containers-page.modification.ts`

**Target:** `/usr/local/emhttp/plugins/dynamix.docker.manager/DockerContainers.page`

When `ENABLE_NEXT_DOCKER_RELEASE` is enabled and Unraid version is 7.3.0+, the modification:

1. Replaces the legacy Docker containers page
2. Injects the Vue web component: `<unraid-docker-container-overview>`
3. Retains the `Nchan="docker_load"` page attribute (an emhttp/WebGUI feature for real-time updates, not controlled by the API)

### PHP Integration

The API integrates with legacy Unraid PHP scripts for certain operations:

- **Digest refresh:** Calls `DockerUpdate.php` to refresh container image digests
- **Update status:** Reads from `DockerContainers.php` output

## Permissions

All Docker operations are protected with permission checks:

| Operation | Resource | Action |
|-----------|----------|--------|
| Read containers/networks | `Resource.DOCKER` | `AuthAction.READ_ANY` |
| Start/stop/pause/update | `Resource.DOCKER` | `AuthAction.UPDATE_ANY` |
| Remove containers | `Resource.DOCKER` | `AuthAction.DELETE_ANY` |

## Configuration Files

| File | Purpose |
|------|---------|
| `docker.config.json` | Template mappings, skip paths, cron schedule |
| `docker.organizer.json` | Container organization tree and views |
| `/var/lib/docker/unraid-update-status.json` | Cached container update statuses |

## Development

### Adding a New Docker Service

1. Create service file in `src/unraid-api/graph/resolvers/docker/` (a skeleton is sketched after this list)
2. Add to `docker.module.ts` providers and exports
3. Inject into resolvers as needed
4. Add GraphQL types to `docker.model.ts` if needed
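A minimal skeleton for step 1; the class name and method are placeholders:

```typescript
import { Injectable, Logger } from '@nestjs/common';

@Injectable()
export class DockerExampleService {
    private readonly logger = new Logger(DockerExampleService.name);

    // Replace with real Docker logic; inject other Docker services as needed.
    async doSomething(): Promise<string> {
        this.logger.debug('Example Docker service operation');
        return 'ok';
    }
}
```

Register it in `docker.module.ts` under both `providers` and `exports` (step 2) so resolvers can inject it.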
### Testing

```bash
# Run Docker-related tests
pnpm --filter ./api test -- src/unraid-api/graph/resolvers/docker/

# Run specific test file
pnpm --filter ./api test -- src/unraid-api/graph/resolvers/docker/docker.service.spec.ts
```

### Feature Flag Testing

To test next-generation Docker features locally:

```bash
ENABLE_NEXT_DOCKER_RELEASE=true unraid-api start
```

Or add to `.env`:

```env
ENABLE_NEXT_DOCKER_RELEASE=true
```
@@ -62,15 +62,18 @@ To build all packages in the monorepo:
 pnpm build
 ```

-### Watch Mode Building
+### Plugin Building (Docker Required)

-For continuous building during development:
+The plugin build requires Docker. This command automatically builds all dependencies (API, web) before starting Docker:

 ```bash
-pnpm build:watch
+cd plugin
+pnpm run docker:build-and-run
+# Then inside the container:
+pnpm build
 ```

-This is useful when you want to see your changes reflected without manually rebuilding. This will also allow you to install a local plugin to test your changes.
+This serves the plugin at `http://YOUR_IP:5858/` for installation on your Unraid server.

 ### Package-Specific Building
@@ -7,7 +7,7 @@
   "cwd": "/usr/local/unraid-api",
   "exec_mode": "fork",
   "wait_ready": true,
-  "listen_timeout": 15000,
+  "listen_timeout": 30000,
   "max_restarts": 10,
   "min_uptime": 10000,
   "watch": false,
@@ -862,6 +862,38 @@ type DockerMutations {

   """Stop a container"""
   stop(id: PrefixedID!): DockerContainer!

+  """Pause (Suspend) a container"""
+  pause(id: PrefixedID!): DockerContainer!
+
+  """Unpause (Resume) a container"""
+  unpause(id: PrefixedID!): DockerContainer!
+
+  """Remove a container"""
+  removeContainer(id: PrefixedID!, withImage: Boolean): Boolean!
+
+  """Update auto-start configuration for Docker containers"""
+  updateAutostartConfiguration(entries: [DockerAutostartEntryInput!]!, persistUserPreferences: Boolean): Boolean!
+
+  """Update a container to the latest image"""
+  updateContainer(id: PrefixedID!): DockerContainer!
+
+  """Update multiple containers to the latest images"""
+  updateContainers(ids: [PrefixedID!]!): [DockerContainer!]!
+
+  """Update all containers that have available updates"""
+  updateAllContainers: [DockerContainer!]!
+}
+
+input DockerAutostartEntryInput {
+  """Docker container identifier"""
+  id: PrefixedID!
+
+  """Whether the container should auto-start"""
+  autoStart: Boolean!
+
+  """Number of seconds to wait after starting the container"""
+  wait: Int
 }

 type VmMutations {
@@ -944,6 +976,23 @@ input UpdateApiKeyInput {
   permissions: [AddPermissionInput!]
 }
+
+"""Customization related mutations"""
+type CustomizationMutations {
+  """Update the UI theme (writes dynamix.cfg)"""
+  setTheme(
+    """Theme to apply"""
+    theme: ThemeName!
+  ): Theme!
+}
+
+"""The theme name"""
+enum ThemeName {
+  azure
+  black
+  gray
+  white
+}

 """
 Parity check related mutations, WIP, response types and functionaliy will change
 """
@@ -1042,14 +1091,6 @@ type Theme {
   headerSecondaryTextColor: String
 }
-
-"""The theme name"""
-enum ThemeName {
-  azure
-  black
-  gray
-  white
-}

 type ExplicitStatusItem {
   name: String!
   updateStatus: UpdateStatus!
@@ -1080,6 +1121,29 @@ enum ContainerPortType {
   UDP
 }
+
+type DockerPortConflictContainer {
+  id: PrefixedID!
+  name: String!
+}
+
+type DockerContainerPortConflict {
+  privatePort: Port!
+  type: ContainerPortType!
+  containers: [DockerPortConflictContainer!]!
+}
+
+type DockerLanPortConflict {
+  lanIpPort: String!
+  publicPort: Port
+  type: ContainerPortType!
+  containers: [DockerPortConflictContainer!]!
+}
+
+type DockerPortConflicts {
+  containerPorts: [DockerContainerPortConflict!]!
+  lanPorts: [DockerLanPortConflict!]!
+}

 type ContainerHostConfig {
   networkMode: String!
 }
@@ -1093,8 +1157,17 @@ type DockerContainer implements Node {
   created: Int!
   ports: [ContainerPort!]!
+
+  """List of LAN-accessible host:port values"""
+  lanIpPorts: [String!]

   """Total size of all files in the container (in bytes)"""
   sizeRootFs: BigInt
+
+  """Size of writable layer (in bytes)"""
+  sizeRw: BigInt
+
+  """Size of container logs (in bytes)"""
+  sizeLog: BigInt
   labels: JSON
   state: ContainerState!
   status: String!
@@ -1102,12 +1175,50 @@
   networkSettings: JSON
   mounts: [JSON!]
   autoStart: Boolean!
+
+  """Zero-based order in the auto-start list"""
+  autoStartOrder: Int
+
+  """Wait time in seconds applied after start"""
+  autoStartWait: Int
+  templatePath: String
+
+  """Project/Product homepage URL"""
+  projectUrl: String
+
+  """Registry/Docker Hub URL"""
+  registryUrl: String
+
+  """Support page/thread URL"""
+  supportUrl: String
+
+  """Icon URL"""
+  iconUrl: String
+
+  """Resolved WebUI URL from template"""
+  webUiUrl: String
+
+  """Shell to use for console access (from template)"""
+  shell: String
+
+  """Port mappings from template (used when container is not running)"""
+  templatePorts: [ContainerPort!]
+
+  """Whether the container is orphaned (no template found)"""
+  isOrphaned: Boolean!
   isUpdateAvailable: Boolean
   isRebuildReady: Boolean
+
+  """Whether Tailscale is enabled for this container"""
+  tailscaleEnabled: Boolean!
+
+  """Tailscale status for this container (fetched via docker exec)"""
+  tailscaleStatus(forceRefresh: Boolean = false): TailscaleStatus
 }

 enum ContainerState {
   RUNNING
+  PAUSED
   EXITED
 }

@@ -1129,49 +1240,213 @@ type DockerNetwork implements Node {
   labels: JSON!
 }
+
+type DockerContainerLogLine {
+  timestamp: DateTime!
+  message: String!
+}
+
+type DockerContainerLogs {
+  containerId: PrefixedID!
+  lines: [DockerContainerLogLine!]!
+
+  """
+  Cursor that can be passed back through the since argument to continue streaming logs.
+  """
+  cursor: DateTime
+}
+
+type DockerContainerStats {
+  id: PrefixedID!
+
+  """CPU Usage Percentage"""
+  cpuPercent: Float!
+
+  """Memory Usage String (e.g. 100MB / 1GB)"""
+  memUsage: String!
+
+  """Memory Usage Percentage"""
+  memPercent: Float!
+
+  """Network I/O String (e.g. 100MB / 1GB)"""
+  netIO: String!
+
+  """Block I/O String (e.g. 100MB / 1GB)"""
+  blockIO: String!
+}
+
+"""Tailscale exit node connection status"""
+type TailscaleExitNodeStatus {
+  """Whether the exit node is online"""
+  online: Boolean!
+
+  """Tailscale IPs of the exit node"""
+  tailscaleIps: [String!]
+}
+
+"""Tailscale status for a Docker container"""
+type TailscaleStatus {
+  """Whether Tailscale is online in the container"""
+  online: Boolean!
+
+  """Current Tailscale version"""
+  version: String
+
+  """Latest available Tailscale version"""
+  latestVersion: String
+
+  """Whether a Tailscale update is available"""
+  updateAvailable: Boolean!
+
+  """Configured Tailscale hostname"""
+  hostname: String
+
+  """Actual Tailscale DNS name"""
+  dnsName: String
+
+  """DERP relay code"""
+  relay: String
+
+  """DERP relay region name"""
+  relayName: String
+
+  """Tailscale IPv4 and IPv6 addresses"""
+  tailscaleIps: [String!]
+
+  """Advertised subnet routes"""
+  primaryRoutes: [String!]
+
+  """Whether this container is an exit node"""
+  isExitNode: Boolean!
+
+  """Status of the connected exit node (if using one)"""
+  exitNodeStatus: TailscaleExitNodeStatus
+
+  """Tailscale Serve/Funnel WebUI URL"""
+  webUiUrl: String
+
+  """Tailscale key expiry date"""
+  keyExpiry: DateTime
+
+  """Days until key expires"""
+  keyExpiryDays: Int
+
+  """Whether the Tailscale key has expired"""
+  keyExpired: Boolean!
+
+  """Tailscale backend state (Running, NeedsLogin, Stopped, etc.)"""
+  backendState: String
+
+  """Authentication URL if Tailscale needs login"""
+  authUrl: String
+}

 type Docker implements Node {
   id: PrefixedID!
   containers(skipCache: Boolean! = false): [DockerContainer!]!
   networks(skipCache: Boolean! = false): [DockerNetwork!]!
-  organizer: ResolvedOrganizerV1!
+  portConflicts(skipCache: Boolean! = false): DockerPortConflicts!
+
+  """
+  Access container logs. Requires specifying a target container id through resolver arguments.
+  """
+  logs(id: PrefixedID!, since: DateTime, tail: Int): DockerContainerLogs!
+  container(id: PrefixedID!): DockerContainer
+  organizer(skipCache: Boolean! = false): ResolvedOrganizerV1!
   containerUpdateStatuses: [ExplicitStatusItem!]!
 }
+
+type DockerTemplateSyncResult {
+  scanned: Int!
+  matched: Int!
+  skipped: Int!
+  errors: [String!]!
+}

 type ResolvedOrganizerView {
   id: String!
   name: String!
-  root: ResolvedOrganizerEntry!
+  rootId: String!
+  flatEntries: [FlatOrganizerEntry!]!
   prefs: JSON
 }

-union ResolvedOrganizerEntry = ResolvedOrganizerFolder | OrganizerContainerResource | OrganizerResource
-
-type ResolvedOrganizerFolder {
-  id: String!
-  type: String!
-  name: String!
-  children: [ResolvedOrganizerEntry!]!
-}
-
-type OrganizerContainerResource {
-  id: String!
-  type: String!
-  name: String!
-  meta: DockerContainer
-}
-
-type OrganizerResource {
-  id: String!
-  type: String!
-  name: String!
-  meta: JSON
-}
-
 type ResolvedOrganizerV1 {
   version: Float!
   views: [ResolvedOrganizerView!]!
 }
+
+type FlatOrganizerEntry {
+  id: String!
+  type: String!
+  name: String!
+  parentId: String
+  depth: Float!
+  position: Float!
+  path: [String!]!
+  hasChildren: Boolean!
+  childrenIds: [String!]!
+  meta: DockerContainer
+}
+
+type NotificationCounts {
+  info: Int!
+  warning: Int!
+  alert: Int!
+  total: Int!
+}
+
+type NotificationOverview {
+  unread: NotificationCounts!
+  archive: NotificationCounts!
+}
+
+type Notification implements Node {
+  id: PrefixedID!
+
+  """Also known as 'event'"""
+  title: String!
+  subject: String!
+  description: String!
+  importance: NotificationImportance!
+  link: String
+  type: NotificationType!
+
+  """ISO Timestamp for when the notification occurred"""
+  timestamp: String
+  formattedTimestamp: String
+}
+
+enum NotificationImportance {
+  ALERT
+  INFO
+  WARNING
+}
+
+enum NotificationType {
+  UNREAD
+  ARCHIVE
+}
+
+type Notifications implements Node {
+  id: PrefixedID!
+
+  """A cached overview of the notifications in the system & their severity."""
+  overview: NotificationOverview!
+  list(filter: NotificationFilter!): [Notification!]!
+
+  """
+  Deduplicated list of unread warning and alert notifications, sorted latest first.
+  """
+  warningsAndAlerts: [Notification!]!
+}
+
+input NotificationFilter {
+  importance: NotificationImportance
+  type: NotificationType!
+  offset: Int!
+  limit: Int!
+}

 type FlashBackupStatus {
   """Status message indicating the outcome of the backup initiation."""
   status: String!
@@ -1772,60 +2047,6 @@ type Metrics implements Node {
   memory: MemoryUtilization
 }
-
-type NotificationCounts {
-  info: Int!
-  warning: Int!
-  alert: Int!
-  total: Int!
-}
-
-type NotificationOverview {
-  unread: NotificationCounts!
-  archive: NotificationCounts!
-}
-
-type Notification implements Node {
-  id: PrefixedID!
-
-  """Also known as 'event'"""
-  title: String!
-  subject: String!
-  description: String!
-  importance: NotificationImportance!
-  link: String
-  type: NotificationType!
-
-  """ISO Timestamp for when the notification occurred"""
-  timestamp: String
-  formattedTimestamp: String
-}
-
-enum NotificationImportance {
-  ALERT
-  INFO
-  WARNING
-}
-
-enum NotificationType {
-  UNREAD
-  ARCHIVE
-}
-
-type Notifications implements Node {
-  id: PrefixedID!
-
-  """A cached overview of the notifications in the system & their severity."""
-  overview: NotificationOverview!
-  list(filter: NotificationFilter!): [Notification!]!
-}
-
-input NotificationFilter {
-  importance: NotificationImportance
-  type: NotificationType!
-  offset: Int!
-  limit: Int!
-}

 type Owner {
   username: String!
   url: String!
@@ -2435,6 +2656,11 @@ type Mutation {
   """Marks a notification as archived."""
   archiveNotification(id: PrefixedID!): Notification!
   archiveNotifications(ids: [PrefixedID!]!): NotificationOverview!
+
+  """
+  Creates a notification if an equivalent unread notification does not already exist.
+  """
+  notifyIfUnique(input: NotificationData!): Notification
   archiveAll(importance: NotificationImportance): NotificationOverview!

   """Marks a notification as unread."""
@@ -2449,11 +2675,22 @@ type Mutation {
   vm: VmMutations!
   parityCheck: ParityCheckMutations!
   apiKey: ApiKeyMutations!
+  customization: CustomizationMutations!
   rclone: RCloneMutations!
   createDockerFolder(name: String!, parentId: String, childrenIds: [String!]): ResolvedOrganizerV1!
   setDockerFolderChildren(folderId: String, childrenIds: [String!]!): ResolvedOrganizerV1!
   deleteDockerEntries(entryIds: [String!]!): ResolvedOrganizerV1!
   moveDockerEntriesToFolder(sourceEntryIds: [String!]!, destinationFolderId: String!): ResolvedOrganizerV1!
+  moveDockerItemsToPosition(sourceEntryIds: [String!]!, destinationFolderId: String!, position: Float!): ResolvedOrganizerV1!
+  renameDockerFolder(folderId: String!, newName: String!): ResolvedOrganizerV1!
+  createDockerFolderWithItems(name: String!, parentId: String, sourceEntryIds: [String!], position: Float): ResolvedOrganizerV1!
+  updateDockerViewPreferences(viewId: String = "default", prefs: JSON!): ResolvedOrganizerV1!
+  syncDockerTemplatePaths: DockerTemplateSyncResult!
+
+  """
+  Reset Docker template mappings to defaults. Use this to recover from corrupted state.
+  """
+  resetDockerTemplateMappings: Boolean!
   refreshDockerDigests: Boolean!

   """Initiates a flash drive backup using a configured remote."""
@@ -2655,10 +2892,12 @@ input AccessUrlInput {
 type Subscription {
   notificationAdded: Notification!
   notificationsOverview: NotificationOverview!
+  notificationsWarningsAndAlerts: [Notification!]!
   ownerSubscription: Owner!
   serversSubscription: Server!
   parityHistorySubscription: ParityCheck!
   arraySubscription: UnraidArray!
+  dockerContainerStats: DockerContainerStats!
   logFile(path: String!): LogFileContent!
   systemMetricsCpu: CpuUtilization!
   systemMetricsCpuTelemetry: CpuPackages!
@@ -12,8 +12,13 @@ default:
 @deploy remote:
     ./scripts/deploy-dev.sh {{remote}}

+# watches typescript files and restarts dev server on changes
+@watch:
+    watchexec -e ts -r -- pnpm dev
+
 alias b := build
 alias d := deploy
+alias w := watch

 sync-env server:
     rsync -avz --progress --stats -e ssh .env* root@{{server}}:/usr/local/unraid-api
@@ -1,6 +1,6 @@
 {
   "name": "@unraid/api",
-  "version": "4.26.2",
+  "version": "4.29.2",
   "main": "src/cli/index.ts",
   "type": "module",
   "corepack": {
@@ -104,6 +104,7 @@
   "escape-html": "1.0.3",
   "execa": "9.6.0",
   "exit-hook": "4.0.0",
+  "fast-xml-parser": "^5.3.0",
   "fastify": "5.5.0",
   "filenamify": "7.0.0",
   "fs-extra": "11.3.1",
@@ -7,7 +7,7 @@ import { exit } from 'process';
 import type { PackageJson } from 'type-fest';
 import { $, cd } from 'zx';

-import { getDeploymentVersion } from './get-deployment-version.js';
+import { getDeploymentVersion } from '@app/../scripts/get-deployment-version.js';

 type ApiPackageJson = PackageJson & {
     version: string;
@@ -83,6 +83,10 @@ try {
         if (parsedPackageJson.dependencies?.[dep]) {
             delete parsedPackageJson.dependencies[dep];
         }
+        // Also strip from peerDependencies (npm doesn't understand workspace: protocol)
+        if (parsedPackageJson.peerDependencies?.[dep]) {
+            delete parsedPackageJson.peerDependencies[dep];
+        }
     });
 }

@@ -6,6 +6,7 @@ exports[`Returns paths 1`] = `
   "unraid-api-base",
   "unraid-data",
   "docker-autostart",
+  "docker-userprefs",
   "docker-socket",
   "rclone-socket",
   "parity-checks",
@@ -11,6 +11,7 @@ test('Returns paths', async () => {
     'unraid-api-base': '/usr/local/unraid-api/',
     'unraid-data': expect.stringContaining('api/dev/data'),
     'docker-autostart': '/var/lib/docker/unraid-autostart',
+    'docker-userprefs': '/boot/config/plugins/dockerMan/userprefs.cfg',
     'docker-socket': '/var/run/docker.sock',
     'parity-checks': expect.stringContaining('api/dev/states/parity-checks.log'),
     htpasswd: '/etc/nginx/htpasswd',
151 api/src/__test__/store/watch/registration-watch.test.ts Normal file
@@ -0,0 +1,151 @@
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
+
+import { StateFileKey } from '@app/store/types.js';
+import { RegistrationType } from '@app/unraid-api/graph/resolvers/registration/registration.model.js';
+
+// Mock the store module
+vi.mock('@app/store/index.js', () => ({
+    store: {
+        dispatch: vi.fn(),
+    },
+    getters: {
+        emhttp: vi.fn(),
+    },
+}));
+
+// Mock the emhttp module
+vi.mock('@app/store/modules/emhttp.js', () => ({
+    loadSingleStateFile: vi.fn((key) => ({ type: 'emhttp/load-single-state-file', payload: key })),
+}));
+
+// Mock the registration module
+vi.mock('@app/store/modules/registration.js', () => ({
+    loadRegistrationKey: vi.fn(() => ({ type: 'registration/load-registration-key' })),
+}));
+
+// Mock the logger
+vi.mock('@app/core/log.js', () => ({
+    keyServerLogger: {
+        info: vi.fn(),
+        debug: vi.fn(),
+    },
+}));
+
+describe('reloadVarIniWithRetry', () => {
+    let store: { dispatch: ReturnType<typeof vi.fn> };
+    let getters: { emhttp: ReturnType<typeof vi.fn> };
+    let loadSingleStateFile: ReturnType<typeof vi.fn>;
+
+    beforeEach(async () => {
+        vi.useFakeTimers();
+
+        const storeModule = await import('@app/store/index.js');
+        const emhttpModule = await import('@app/store/modules/emhttp.js');
+
+        store = storeModule.store as unknown as typeof store;
+        getters = storeModule.getters as unknown as typeof getters;
+        loadSingleStateFile = emhttpModule.loadSingleStateFile as unknown as typeof loadSingleStateFile;
+
+        vi.clearAllMocks();
+    });
+
+    afterEach(() => {
+        vi.useRealTimers();
+    });
+
+    it('returns early when registration state changes on first retry', async () => {
+        // Initial state is TRIAL
+        getters.emhttp
+            .mockReturnValueOnce({ var: { regTy: RegistrationType.TRIAL } }) // First call (beforeState)
+            .mockReturnValueOnce({ var: { regTy: RegistrationType.UNLEASHED } }); // After first reload
+
+        const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');
+
+        const promise = reloadVarIniWithRetry();
+
+        // Advance past the first delay (500ms)
+        await vi.advanceTimersByTimeAsync(500);
+        await promise;
+
+        // Should only dispatch once since state changed
+        expect(store.dispatch).toHaveBeenCalledTimes(1);
+        expect(loadSingleStateFile).toHaveBeenCalledWith(StateFileKey.var);
+    });
+
+    it('retries up to maxRetries when state does not change', async () => {
+        // State never changes
+        getters.emhttp.mockReturnValue({ var: { regTy: RegistrationType.TRIAL } });
+
+        const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');
+
+        const promise = reloadVarIniWithRetry(3);
+
+        // Advance through all retries: 500ms, 1000ms, 2000ms
+        await vi.advanceTimersByTimeAsync(500);
+        await vi.advanceTimersByTimeAsync(1000);
+        await vi.advanceTimersByTimeAsync(2000);
+        await promise;
+
+        // Should dispatch 3 times (maxRetries)
+        expect(store.dispatch).toHaveBeenCalledTimes(3);
+    });
+
+    it('stops retrying when state changes on second attempt', async () => {
+        getters.emhttp
+            .mockReturnValueOnce({ var: { regTy: RegistrationType.TRIAL } }) // beforeState
+            .mockReturnValueOnce({ var: { regTy: RegistrationType.TRIAL } }) // After first reload (no change)
+            .mockReturnValueOnce({ var: { regTy: RegistrationType.UNLEASHED } }); // After second reload (changed!)
+
+        const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');
+
+        const promise = reloadVarIniWithRetry(3);
+
+        // First retry
+        await vi.advanceTimersByTimeAsync(500);
+        // Second retry
+        await vi.advanceTimersByTimeAsync(1000);
+        await promise;
+
+        // Should dispatch twice - stopped after state changed
+        expect(store.dispatch).toHaveBeenCalledTimes(2);
+    });
+
+    it('handles undefined regTy gracefully', async () => {
+        getters.emhttp.mockReturnValue({ var: {} });
+
+        const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');
+
+        const promise = reloadVarIniWithRetry(1);
+
+        await vi.advanceTimersByTimeAsync(500);
+        await promise;
+
+        // Should still dispatch even with undefined regTy
+        expect(store.dispatch).toHaveBeenCalledTimes(1);
+    });
+
+    it('uses exponential backoff delays', async () => {
+        getters.emhttp.mockReturnValue({ var: { regTy: RegistrationType.TRIAL } });
+
+        const { reloadVarIniWithRetry } = await import('@app/store/watch/registration-watch.js');
+
+        const promise = reloadVarIniWithRetry(3);
+
+        // At 0ms, no dispatch yet
+        expect(store.dispatch).toHaveBeenCalledTimes(0);
+
+        // At 500ms, first dispatch
+        await vi.advanceTimersByTimeAsync(500);
+        expect(store.dispatch).toHaveBeenCalledTimes(1);
+
+        // At 1500ms (500 + 1000), second dispatch
+        await vi.advanceTimersByTimeAsync(1000);
+        expect(store.dispatch).toHaveBeenCalledTimes(2);
+
+        // At 3500ms (500 + 1000 + 2000), third dispatch
+        await vi.advanceTimersByTimeAsync(2000);
+        expect(store.dispatch).toHaveBeenCalledTimes(3);
+
+        await promise;
+    });
+});
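The module under test is not shown in this compare view. A minimal sketch consistent with the behavior these tests pin down (500ms initial delay, doubling backoff, early return once `regTy` changes) might look like the following; treat it as a hypothetical reconstruction of `@app/store/watch/registration-watch.js`, not the actual source:

```typescript
import { keyServerLogger } from '@app/core/log.js';
import { getters, store } from '@app/store/index.js';
import { loadSingleStateFile } from '@app/store/modules/emhttp.js';
import { StateFileKey } from '@app/store/types.js';

// Hypothetical reconstruction -- the real implementation may differ in detail.
export const reloadVarIniWithRetry = async (maxRetries = 3): Promise<void> => {
    const beforeState = getters.emhttp().var?.regTy;
    let delayMs = 500;

    for (let attempt = 0; attempt < maxRetries; attempt++) {
        await new Promise((resolve) => setTimeout(resolve, delayMs));
        store.dispatch(loadSingleStateFile(StateFileKey.var));

        if (getters.emhttp().var?.regTy !== beforeState) {
            keyServerLogger.debug('Registration state changed; stopping var.ini reloads');
            return;
        }
        delayMs *= 2; // exponential backoff: 500ms, 1000ms, 2000ms, ...
    }
};
```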
234 api/src/common/compare-semver-version.spec.ts Normal file
@@ -0,0 +1,234 @@
+import { eq, gt, gte, lt, lte, parse } from 'semver';
+import { describe, expect, it } from 'vitest';
+
+import { compareVersions } from '@app/common/compare-semver-version.js';
+
+describe('compareVersions', () => {
+    describe('basic comparisons', () => {
+        it('should return true when current version is greater than compared (gte)', () => {
+            const current = parse('7.3.0')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gte)).toBe(true);
+        });
+
+        it('should return true when current version equals compared (gte)', () => {
+            const current = parse('7.2.0')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gte)).toBe(true);
+        });
+
+        it('should return false when current version is less than compared (gte)', () => {
+            const current = parse('7.1.0')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gte)).toBe(false);
+        });
+
+        it('should return true when current version is less than compared (lte)', () => {
+            const current = parse('7.1.0')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, lte)).toBe(true);
+        });
+
+        it('should return true when current version equals compared (lte)', () => {
+            const current = parse('7.2.0')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, lte)).toBe(true);
+        });
+
+        it('should return false when current version is greater than compared (lte)', () => {
+            const current = parse('7.3.0')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, lte)).toBe(false);
+        });
+
+        it('should return true when current version is greater than compared (gt)', () => {
+            const current = parse('7.3.0')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gt)).toBe(true);
+        });
+
+        it('should return false when current version equals compared (gt)', () => {
+            const current = parse('7.2.0')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gt)).toBe(false);
+        });
+
+        it('should return true when current version is less than compared (lt)', () => {
+            const current = parse('7.1.0')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, lt)).toBe(true);
+        });
+
+        it('should return false when current version equals compared (lt)', () => {
+            const current = parse('7.2.0')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, lt)).toBe(false);
+        });
+
+        it('should return true when versions are equal (eq)', () => {
+            const current = parse('7.2.0')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, eq)).toBe(true);
+        });
+
+        it('should return false when versions are not equal (eq)', () => {
+            const current = parse('7.3.0')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, eq)).toBe(false);
+        });
+    });
+
+    describe('prerelease handling - current has prerelease, compared is stable', () => {
+        it('should return true for gte when current prerelease > stable (same base)', () => {
+            const current = parse('7.2.0-beta.1')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gte)).toBe(true);
+        });
+
+        it('should return true for gt when current prerelease > stable (same base)', () => {
+            const current = parse('7.2.0-beta.1')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gt)).toBe(true);
+        });
+
+        it('should return false for lte when current prerelease < stable (same base)', () => {
+            const current = parse('7.2.0-beta.1')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, lte)).toBe(false);
+        });
+
+        it('should return false for lt when current prerelease < stable (same base)', () => {
+            const current = parse('7.2.0-beta.1')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, lt)).toBe(false);
+        });
+
+        it('should return false for eq when current prerelease != stable (same base)', () => {
+            const current = parse('7.2.0-beta.1')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, eq)).toBe(false);
+        });
+    });
+
+    describe('prerelease handling - current is stable, compared has prerelease', () => {
+        it('should use normal comparison when current is stable and compared has prerelease', () => {
+            const current = parse('7.2.0')!;
+            const compared = parse('7.2.0-beta.1')!;
+            expect(compareVersions(current, compared, gte)).toBe(true);
+        });
+
+        it('should use normal comparison for lte when current is stable and compared has prerelease', () => {
+            const current = parse('7.2.0')!;
+            const compared = parse('7.2.0-beta.1')!;
+            expect(compareVersions(current, compared, lte)).toBe(false);
+        });
+    });
+
+    describe('prerelease handling - both have prerelease', () => {
+        it('should use normal comparison when both versions have prerelease', () => {
+            const current = parse('7.2.0-beta.2')!;
+            const compared = parse('7.2.0-beta.1')!;
+            expect(compareVersions(current, compared, gte)).toBe(true);
+        });
+
+        it('should use normal comparison for lte when both have prerelease', () => {
+            const current = parse('7.2.0-beta.1')!;
+            const compared = parse('7.2.0-beta.2')!;
+            expect(compareVersions(current, compared, lte)).toBe(true);
+        });
+
+        it('should use normal comparison when prerelease versions are equal', () => {
+            const current = parse('7.2.0-beta.1')!;
+            const compared = parse('7.2.0-beta.1')!;
+            expect(compareVersions(current, compared, eq)).toBe(true);
+        });
+    });
+
+    describe('prerelease handling - different base versions', () => {
+        it('should use normal comparison when base versions differ (current prerelease)', () => {
+            const current = parse('7.3.0-beta.1')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gte)).toBe(true);
+        });
+
+        it('should use normal comparison when base versions differ (current prerelease, less)', () => {
+            const current = parse('7.1.0-beta.1')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gte)).toBe(false);
+        });
+    });
+
+    describe('includePrerelease flag', () => {
+        it('should apply special prerelease handling when includePrerelease is true', () => {
+            const current = parse('7.2.0-beta.1')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gte, { includePrerelease: true })).toBe(true);
+        });
+
+        it('should skip special prerelease handling when includePrerelease is false', () => {
+            const current = parse('7.2.0-beta.1')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gte, { includePrerelease: false })).toBe(false);
+        });
+
+        it('should default to includePrerelease true', () => {
+            const current = parse('7.2.0-beta.1')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gte)).toBe(true);
+        });
+    });
+
+    describe('edge cases', () => {
+        it('should handle patch version differences', () => {
+            const current = parse('7.2.1')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gte)).toBe(true);
+        });
+
+        it('should handle minor version differences', () => {
+            const current = parse('7.3.0')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gte)).toBe(true);
+        });
+
+        it('should handle major version differences', () => {
+            const current = parse('8.0.0')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gte)).toBe(true);
+        });
+
+        it('should handle complex prerelease tags', () => {
+            const current = parse('7.2.0-beta.2.4')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gte)).toBe(true);
+        });
+
+        it('should handle alpha prerelease tags', () => {
+            const current = parse('7.2.0-alpha.1')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gte)).toBe(true);
+        });
+
+        it('should handle rc prerelease tags', () => {
+            const current = parse('7.2.0-rc.1')!;
+            const compared = parse('7.2.0')!;
+            expect(compareVersions(current, compared, gte)).toBe(true);
+        });
+    });
+
+    describe('comparison function edge cases', () => {
+        it('should handle custom comparison functions that are not gte/lte/gt/lt', () => {
+            const current = parse('7.2.0-beta.1')!;
+            const compared = parse('7.2.0')!;
+            const customCompare = (a: typeof current, b: typeof compared) => a.compare(b) === 1;
+            expect(compareVersions(current, compared, customCompare)).toBe(false);
+        });
+
+        it('should fall through to normal comparison for unknown functions with prerelease', () => {
+            const current = parse('7.2.0-beta.1')!;
+            const compared = parse('7.2.0')!;
+            const customCompare = () => false;
+            expect(compareVersions(current, compared, customCompare)).toBe(false);
+        });
+    });
+});
44 api/src/common/compare-semver-version.ts Normal file
@@ -0,0 +1,44 @@
+import type { SemVer } from 'semver';
+import { gt, gte, lt, lte } from 'semver';
+
+/**
+ * Shared version comparison logic with special handling for prerelease versions.
+ *
+ * When base versions are equal and current version has a prerelease tag while compared doesn't:
+ * - For gte/gt: prerelease is considered greater than stable (returns true)
+ * - For lte/lt: prerelease is considered less than stable (returns false)
+ * - For eq: prerelease is not equal to stable (returns false)
+ *
+ * @param currentVersion - The current Unraid version (SemVer object)
+ * @param comparedVersion - The version to compare against (SemVer object)
+ * @param compareFn - The comparison function (e.g., gte, lte, lt, gt, eq)
+ * @param includePrerelease - Whether to include special prerelease handling
+ * @returns The result of the comparison
+ */
+export const compareVersions = (
+    currentVersion: SemVer,
+    comparedVersion: SemVer,
+    compareFn: (a: SemVer, b: SemVer) => boolean,
+    { includePrerelease = true }: { includePrerelease?: boolean } = {}
+): boolean => {
+    if (includePrerelease) {
+        const baseCurrent = `${currentVersion.major}.${currentVersion.minor}.${currentVersion.patch}`;
+        const baseCompared = `${comparedVersion.major}.${comparedVersion.minor}.${comparedVersion.patch}`;
+
+        if (baseCurrent === baseCompared) {
+            const currentHasPrerelease = currentVersion.prerelease.length > 0;
+            const comparedHasPrerelease = comparedVersion.prerelease.length > 0;
+
+            if (currentHasPrerelease && !comparedHasPrerelease) {
+                if (compareFn === gte || compareFn === gt) {
+                    return true;
+                }
+                if (compareFn === lte || compareFn === lt) {
+                    return false;
+                }
+            }
+        }
+    }
+
+    return compareVersions2(currentVersion, comparedVersion, compareFn);
+};
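For orientation, here is how the helper behaves at the prerelease boundary (a usage sketch layered on the file above, not part of the diff):

```typescript
import { gte, lte, parse } from 'semver';

import { compareVersions } from '@app/common/compare-semver-version.js';

const current = parse('7.2.0-beta.1')!;
const stable = parse('7.2.0')!;

// Same base version, current is a prerelease: treated as >= stable.
compareVersions(current, stable, gte); // true
compareVersions(current, stable, lte); // false

// Plain semver ordering (beta sorts before stable) when the special
// handling is switched off.
compareVersions(current, stable, gte, { includePrerelease: false }); // false
```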
60 api/src/common/get-unraid-version-sync.ts Normal file
@@ -0,0 +1,60 @@
+import type { SemVer } from 'semver';
+import { coerce } from 'semver';
+
+import { compareVersions } from '@app/common/compare-semver-version.js';
+import { fileExistsSync } from '@app/core/utils/files/file-exists.js';
+import { parseConfig } from '@app/core/utils/misc/parse-config.js';
+
+type UnraidVersionIni = {
+    version?: string;
+};
+
+/**
+ * Synchronously reads the Unraid version from /etc/unraid-version
+ * @returns The Unraid version string, or 'unknown' if the file cannot be read
+ */
+export const getUnraidVersionSync = (): string => {
+    const versionPath = '/etc/unraid-version';
+
+    if (!fileExistsSync(versionPath)) {
+        return 'unknown';
+    }
+
+    try {
+        const versionIni = parseConfig<UnraidVersionIni>({ filePath: versionPath, type: 'ini' });
+        return versionIni.version || 'unknown';
+    } catch {
+        return 'unknown';
+    }
+};
+
+/**
+ * Compares the Unraid version against a specified version using a comparison function
+ * @param compareFn - The comparison function from semver (e.g., lt, gte, lte, gt, eq)
+ * @param version - The version to compare against (e.g., '7.3.0')
+ * @param options - Options for the comparison
+ * @returns The result of the comparison, or false if the version cannot be determined
+ */
+export const compareUnraidVersionSync = (
+    compareFn: (a: SemVer, b: SemVer) => boolean,
+    version: string,
+    { includePrerelease = true }: { includePrerelease?: boolean } = {}
+): boolean => {
+    const currentVersion = getUnraidVersionSync();
+    if (currentVersion === 'unknown') {
+        return false;
+    }
+
+    try {
+        const current = coerce(currentVersion, { includePrerelease });
+        const compared = coerce(version, { includePrerelease });
+
+        if (!current || !compared) {
+            return false;
+        }
+
+        return compareVersions(current, compared, compareFn, { includePrerelease });
+    } catch {
+        return false;
+    }
+};
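A caller-side sketch (the helper itself is above; `gte` and `lt` come from `semver`):

```typescript
import { gte, lt } from 'semver';

import { compareUnraidVersionSync } from '@app/common/get-unraid-version-sync.js';

// At least 7.3.0? Returns false when /etc/unraid-version is missing or
// unparseable, so feature gates fail closed.
const hasNewDockerApis = compareUnraidVersionSync(gte, '7.3.0');

// Older than 7.2.0?
const needsLegacyBehavior = compareUnraidVersionSync(lt, '7.2.0');
```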
12 api/src/connect-plugin-cleanup.ts Normal file
@@ -0,0 +1,12 @@
+import { existsSync } from 'node:fs';
+
+/**
+ * Local filesystem and env checks stay synchronous so we can branch at module load.
+ * @returns True if the Connect Unraid plugin is installed, false otherwise.
+ */
+export const isConnectPluginInstalled = () => {
+    if (process.env.SKIP_CONNECT_PLUGIN_CHECK === 'true') {
+        return true;
+    }
+    return existsSync('/boot/config/plugins/dynamix.unraid.net.plg');
+};
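Callers can branch on this check at module load (a sketch; the surrounding cleanup logic is not shown in this diff):

```typescript
import { isConnectPluginInstalled } from '@app/connect-plugin-cleanup.js';

if (isConnectPluginInstalled()) {
    // Connect-specific cleanup would run here. Setting
    // SKIP_CONNECT_PLUGIN_CHECK=true forces this branch in development.
}
```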
@@ -1,7 +1,7 @@
 import pino from 'pino';
 import pretty from 'pino-pretty';

-import { API_VERSION, LOG_LEVEL, LOG_TYPE, SUPPRESS_LOGS } from '@app/environment.js';
+import { API_VERSION, LOG_LEVEL, LOG_TYPE, PATHS_LOGS_FILE, SUPPRESS_LOGS } from '@app/environment.js';

 export const levels = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'] as const;

@@ -15,18 +15,24 @@ const nullDestination = pino.destination({
     },
 });

+const LOG_TRANSPORT = process.env.LOG_TRANSPORT ?? 'file';
+const useConsole = LOG_TRANSPORT === 'console';
+
 export const logDestination =
-    process.env.SUPPRESS_LOGS === 'true' ? nullDestination : pino.destination();
-// Since PM2 captures stdout and writes to the log file, we should not colorize stdout
-// to avoid ANSI escape codes in the log file
+    process.env.SUPPRESS_LOGS === 'true'
+        ? nullDestination
+        : useConsole
+          ? pino.destination(1) // stdout
+          : pino.destination({ dest: PATHS_LOGS_FILE, mkdir: true });

 const stream = SUPPRESS_LOGS
     ? nullDestination
     : LOG_TYPE === 'pretty'
       ? pretty({
             singleLine: true,
             hideObject: false,
-            colorize: false, // No colors since PM2 writes stdout to file
-            colorizeObjects: false,
+            colorize: useConsole, // Enable colors when outputting to console
+            colorizeObjects: useConsole,
             levelFirst: false,
             ignore: 'hostname,pid',
             destination: logDestination,
@@ -34,10 +40,10 @@ const stream = SUPPRESS_LOGS
         customPrettifiers: {
             time: (timestamp: string | object) => `[${timestamp}`,
             level: (_logLevel: string | object, _key: string, log: any, extras: any) => {
-                // Use label instead of labelColorized for non-colored output
-                const { label } = extras;
+                const { label, labelColorized } = extras;
                 const context = log.context || log.logger || 'app';
-                return `${label} ${context}]`;
+                // Use colorized label when outputting to console
+                return `${useConsole ? labelColorized : label} ${context}]`;
             },
         },
         messageFormat: (log: any, messageKey: string) => {
231 api/src/core/utils/misc/__test__/timeout-budget.test.ts Normal file
@@ -0,0 +1,231 @@
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
+
+import { TimeoutBudget } from '@app/core/utils/misc/timeout-budget.js';
+
+describe('TimeoutBudget', () => {
+    beforeEach(() => {
+        vi.useFakeTimers();
+    });
+
+    afterEach(() => {
+        vi.useRealTimers();
+    });
+
+    describe('constructor', () => {
+        it('initializes with the given budget', () => {
+            const budget = new TimeoutBudget(10000);
+            expect(budget.remaining()).toBe(10000);
+            expect(budget.elapsed()).toBe(0);
+        });
+    });
+
+    describe('remaining', () => {
+        it('returns full budget immediately after construction', () => {
+            const budget = new TimeoutBudget(5000);
+            expect(budget.remaining()).toBe(5000);
+        });
+
+        it('decreases as time passes', () => {
+            const budget = new TimeoutBudget(5000);
+
+            vi.advanceTimersByTime(1000);
+            expect(budget.remaining()).toBe(4000);
+
+            vi.advanceTimersByTime(2000);
+            expect(budget.remaining()).toBe(2000);
+        });
+
+        it('never returns negative values', () => {
+            const budget = new TimeoutBudget(1000);
+
+            vi.advanceTimersByTime(5000); // Well past the budget
+            expect(budget.remaining()).toBe(0);
+        });
+
+        it('returns zero when budget is exactly exhausted', () => {
+            const budget = new TimeoutBudget(1000);
+
+            vi.advanceTimersByTime(1000);
+            expect(budget.remaining()).toBe(0);
+        });
+    });
+
+    describe('elapsed', () => {
+        it('returns zero immediately after construction', () => {
+            const budget = new TimeoutBudget(5000);
+            expect(budget.elapsed()).toBe(0);
+        });
+
+        it('increases as time passes', () => {
+            const budget = new TimeoutBudget(5000);
+
+            vi.advanceTimersByTime(1000);
+            expect(budget.elapsed()).toBe(1000);
+
+            vi.advanceTimersByTime(500);
+            expect(budget.elapsed()).toBe(1500);
+        });
+
+        it('continues increasing past the budget limit', () => {
+            const budget = new TimeoutBudget(1000);
+
+            vi.advanceTimersByTime(2000);
+            expect(budget.elapsed()).toBe(2000);
+        });
+    });
+
+    describe('getTimeout', () => {
+        it('returns maxMs when plenty of budget remains', () => {
+            const budget = new TimeoutBudget(10000);
+            expect(budget.getTimeout(2000)).toBe(2000);
+        });
+
+        it('returns maxMs when budget minus reserve is sufficient', () => {
+            const budget = new TimeoutBudget(10000);
+            expect(budget.getTimeout(2000, 5000)).toBe(2000);
+        });
+
+        it('caps timeout to available budget minus reserve', () => {
+            const budget = new TimeoutBudget(10000);
+            vi.advanceTimersByTime(5000); // 5000ms remaining
+
+            // Want 2000ms but reserve 4000ms, only 1000ms available
+            expect(budget.getTimeout(2000, 4000)).toBe(1000);
+        });
+
+        it('caps timeout to remaining budget when no reserve', () => {
+            const budget = new TimeoutBudget(1000);
+            vi.advanceTimersByTime(800); // 200ms remaining
+
+            expect(budget.getTimeout(500)).toBe(200);
+        });
+
+        it('returns minimum of 100ms even when budget is exhausted', () => {
+            const budget = new TimeoutBudget(1000);
+            vi.advanceTimersByTime(2000); // Budget exhausted
+
+            expect(budget.getTimeout(500)).toBe(100);
+        });
+
+        it('returns minimum of 100ms when reserve exceeds remaining', () => {
+            const budget = new TimeoutBudget(5000);
+            vi.advanceTimersByTime(4000); // 1000ms remaining
+
+            // Reserve 2000ms but only 1000ms remaining
+            expect(budget.getTimeout(500, 2000)).toBe(100);
+        });
+
+        it('uses default reserve of 0 when not specified', () => {
+            const budget = new TimeoutBudget(1000);
+            vi.advanceTimersByTime(500); // 500ms remaining
+
+            expect(budget.getTimeout(1000)).toBe(500); // Capped to remaining
+        });
+    });
+
+    describe('hasTimeFor', () => {
+        it('returns true when enough time remains', () => {
+            const budget = new TimeoutBudget(5000);
+            expect(budget.hasTimeFor(3000)).toBe(true);
+        });
+
+        it('returns true when exactly enough time remains', () => {
+            const budget = new TimeoutBudget(5000);
+            expect(budget.hasTimeFor(5000)).toBe(true);
+        });
+
+        it('returns false when not enough time remains', () => {
+            const budget = new TimeoutBudget(5000);
+            expect(budget.hasTimeFor(6000)).toBe(false);
+        });
+
+        it('accounts for elapsed time', () => {
+            const budget = new TimeoutBudget(5000);
+            vi.advanceTimersByTime(3000); // 2000ms remaining
+
+            expect(budget.hasTimeFor(2000)).toBe(true);
+            expect(budget.hasTimeFor(3000)).toBe(false);
+        });
+
+        it('returns false when budget is exhausted', () => {
+            const budget = new TimeoutBudget(1000);
+            vi.advanceTimersByTime(2000);
+
+            expect(budget.hasTimeFor(1)).toBe(false);
+        });
+
+        it('returns true for zero required time', () => {
+            const budget = new TimeoutBudget(1000);
+            vi.advanceTimersByTime(2000); // Budget exhausted
+
+            expect(budget.hasTimeFor(0)).toBe(true);
+        });
+    });
+
+    describe('integration scenarios', () => {
+        it('simulates a typical startup sequence', () => {
+            const budget = new TimeoutBudget(13000); // 13 second budget
+            const BOOTSTRAP_RESERVE = 8000;
+            const MAX_OP_TIMEOUT = 2000;
+
+            // First operation - should get full 2000ms
+            const op1Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
+            expect(op1Timeout).toBe(2000);
+
+            // Simulate operation taking 500ms
+            vi.advanceTimersByTime(500);
+
+            // Second operation - still have plenty of budget
+            const op2Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
+            expect(op2Timeout).toBe(2000);
+
+            // Simulate operation taking 1000ms
+            vi.advanceTimersByTime(1000);
+
+            // Third operation
+            const op3Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
+            expect(op3Timeout).toBe(2000);
+
+            // Simulate slow operation taking 2000ms
+            vi.advanceTimersByTime(2000);
+
+            // Now 3500ms elapsed, 9500ms remaining
+            // After reserve, only 1500ms available - less than max
+            const op4Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
+            expect(op4Timeout).toBe(1500);
+
+            // Simulate operation completing
+            vi.advanceTimersByTime(1000);
+
+            // Bootstrap phase - use all remaining time
+            const bootstrapTimeout = budget.remaining();
+            expect(bootstrapTimeout).toBe(8500);
+            expect(budget.hasTimeFor(8000)).toBe(true);
+        });
+
+        it('handles worst-case scenario where all operations timeout', () => {
+            const budget = new TimeoutBudget(13000);
+            const BOOTSTRAP_RESERVE = 8000;
+            const MAX_OP_TIMEOUT = 2000;
+
+            // Each operation times out at its limit
+            // Available for operations: 13000 - 8000 = 5000ms
+
+            // Op 1: gets 2000ms, times out
+            budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
+            vi.advanceTimersByTime(2000);
+
+            // Op 2: gets 2000ms, times out
+            budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
+            vi.advanceTimersByTime(2000);
+
+            // Op 3: only 1000ms available (5000 - 4000), times out
+            const op3Timeout = budget.getTimeout(MAX_OP_TIMEOUT, BOOTSTRAP_RESERVE);
+            expect(op3Timeout).toBe(1000);
+            vi.advanceTimersByTime(1000);
+
+            // Bootstrap: should still have 8000ms
+            expect(budget.remaining()).toBe(8000);
+        });
+    });
+});
65 api/src/core/utils/misc/__test__/with-timeout.test.ts Normal file
@@ -0,0 +1,65 @@
import { describe, expect, it } from 'vitest';

import { withTimeout } from '@app/core/utils/misc/with-timeout.js';

describe('withTimeout', () => {
    it('resolves when promise completes before timeout', async () => {
        const promise = Promise.resolve('success');
        const result = await withTimeout(promise, 1000, 'testOp');
        expect(result).toBe('success');
    });

    it('resolves with correct value for delayed promise within timeout', async () => {
        const promise = new Promise<number>((resolve) => setTimeout(() => resolve(42), 50));
        const result = await withTimeout(promise, 1000, 'testOp');
        expect(result).toBe(42);
    });

    it('rejects when promise takes longer than timeout', async () => {
        const promise = new Promise<string>((resolve) => setTimeout(() => resolve('late'), 500));
        await expect(withTimeout(promise, 50, 'slowOp')).rejects.toThrow('slowOp timed out after 50ms');
    });

    it('includes operation name in timeout error message', async () => {
        const promise = new Promise<void>(() => {}); // Never resolves
        await expect(withTimeout(promise, 10, 'myCustomOperation')).rejects.toThrow(
            'myCustomOperation timed out after 10ms'
        );
    });

    it('propagates rejection from the original promise', async () => {
        const promise = Promise.reject(new Error('original error'));
        await expect(withTimeout(promise, 1000, 'testOp')).rejects.toThrow('original error');
    });

    it('resolves immediately for already-resolved promises', async () => {
        const promise = Promise.resolve('immediate');
        const start = Date.now();
        const result = await withTimeout(promise, 1000, 'testOp');
        const elapsed = Date.now() - start;

        expect(result).toBe('immediate');
        expect(elapsed).toBeLessThan(50); // Should be nearly instant
    });

    it('works with zero timeout (immediately times out for pending promises)', async () => {
        const promise = new Promise<void>(() => {}); // Never resolves
        await expect(withTimeout(promise, 0, 'zeroTimeout')).rejects.toThrow(
            'zeroTimeout timed out after 0ms'
        );
    });

    it('preserves the type of the resolved value', async () => {
        interface TestType {
            id: number;
            name: string;
        }
        const testObj: TestType = { id: 1, name: 'test' };
        const promise = Promise.resolve(testObj);

        const result = await withTimeout(promise, 1000, 'testOp');

        expect(result.id).toBe(1);
        expect(result.name).toBe('test');
    });
});
@@ -2,7 +2,7 @@ import { AppError } from '@app/core/errors/app-error.js';
import { getters } from '@app/store/index.js';

interface DockerError extends NodeJS.ErrnoException {
-    address: string;
+    address?: string;
}

/**
70 api/src/core/utils/misc/timeout-budget.ts Normal file
@@ -0,0 +1,70 @@
/**
 * Tracks remaining time budget to ensure we don't exceed external timeouts (e.g., PM2's listen_timeout).
 *
 * This class helps coordinate multiple async operations by:
 * - Tracking elapsed time from construction
 * - Calculating dynamic timeouts based on remaining budget
 * - Reserving time for critical operations (like server bootstrap)
 *
 * @example
 * ```typescript
 * const budget = new TimeoutBudget(15000); // 15 second total budget
 *
 * // Each operation gets a timeout capped by remaining budget
 * await withTimeout(loadConfig(), budget.getTimeout(2000, 8000), 'loadConfig');
 * await withTimeout(loadState(), budget.getTimeout(2000, 8000), 'loadState');
 *
 * // Bootstrap gets all remaining time
 * await withTimeout(bootstrap(), budget.remaining(), 'bootstrap');
 *
 * console.log(`Completed in ${budget.elapsed()}ms`);
 * ```
 */
export class TimeoutBudget {
    private startTime: number;
    private budgetMs: number;

    /**
     * Creates a new startup budget tracker.
     * @param budgetMs Total time budget in milliseconds
     */
    constructor(budgetMs: number) {
        this.startTime = Date.now();
        this.budgetMs = budgetMs;
    }

    /**
     * Returns remaining time in milliseconds.
     * Never returns negative values.
     */
    remaining(): number {
        return Math.max(0, this.budgetMs - (Date.now() - this.startTime));
    }

    /**
     * Returns elapsed time in milliseconds since construction.
     */
    elapsed(): number {
        return Date.now() - this.startTime;
    }

    /**
     * Returns timeout for an operation, capped by remaining budget.
     *
     * @param maxMs Maximum timeout for this operation
     * @param reserveMs Time to reserve for future operations (e.g., server bootstrap)
     * @returns Timeout in milliseconds (minimum 100ms to avoid instant failures)
     */
    getTimeout(maxMs: number, reserveMs: number = 0): number {
        const available = this.remaining() - reserveMs;
        return Math.max(100, Math.min(maxMs, available));
    }

    /**
     * Checks if there's enough time remaining for an operation.
     * @param requiredMs Time required in milliseconds
     */
    hasTimeFor(requiredMs: number): boolean {
        return this.remaining() >= requiredMs;
    }
}
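Worth noting about the class above: getTimeout() floors its result at 100ms, so an operation can still receive a short timeout even when the reserve exceeds the remaining budget, while hasTimeFor() applies no floor. A minimal sketch of that edge case, using only the API shown above:

const budget = new TimeoutBudget(1_000);
// Reserve (8s) exceeds the whole budget (1s): available is negative,
// so the 100ms floor applies instead of returning zero or a negative timeout.
budget.getTimeout(2_000, 8_000); // => 100
// hasTimeFor() is the honest check - no floor is applied.
budget.hasTimeFor(8_000); // => false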
25 api/src/core/utils/misc/with-timeout.ts Normal file
@@ -0,0 +1,25 @@
/**
 * Wraps a promise with a timeout to prevent hangs.
 * If the operation takes longer than timeoutMs, it rejects with a timeout error.
 *
 * @param promise The promise to wrap with a timeout
 * @param timeoutMs Maximum time in milliseconds before timing out
 * @param operationName Name of the operation for the error message
 * @returns The result of the promise if it completes in time
 * @throws Error if the operation times out
 */
export const withTimeout = <T>(
    promise: Promise<T>,
    timeoutMs: number,
    operationName: string
): Promise<T> => {
    return Promise.race([
        promise,
        new Promise<never>((_, reject) =>
            setTimeout(
                () => reject(new Error(`${operationName} timed out after ${timeoutMs}ms`)),
                timeoutMs
            )
        ),
    ]);
};
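One caveat of the Promise.race implementation above: the losing timer is never cleared, so it keeps the event loop alive until it fires. That is harmless for short startup timeouts, but a variant that clears the timer once the race settles could look roughly like this (a sketch, not part of this changeset):

export const withTimeoutCleared = <T>(
    promise: Promise<T>,
    timeoutMs: number,
    operationName: string
): Promise<T> => {
    let timer: NodeJS.Timeout | undefined;
    const timeout = new Promise<never>((_, reject) => {
        timer = setTimeout(
            () => reject(new Error(`${operationName} timed out after ${timeoutMs}ms`)),
            timeoutMs
        );
    });
    // finally() runs however the race settles, so the timer never outlives the call
    return Promise.race([promise, timeout]).finally(() => clearTimeout(timer));
};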
19 api/src/core/utils/network.ts Normal file
@@ -0,0 +1,19 @@
import { getters } from '@app/store/index.js';

/**
 * Returns the LAN IPv4 address reported by emhttp, if available.
 */
export function getLanIp(): string {
    const emhttp = getters.emhttp();
    const lanFromNetworks = emhttp?.networks?.[0]?.ipaddr?.[0];
    if (lanFromNetworks) {
        return lanFromNetworks;
    }

    const lanFromNginx = emhttp?.nginx?.lanIp;
    if (lanFromNginx) {
        return lanFromNginx;
    }

    return '';
}
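The fallback order here (first interface in the networks array, then nginx state, then an empty string) is easy to pin down in a test. A sketch using the repo's vitest conventions; the mock shape is an assumption based on the getters used above:

import { describe, expect, it, vi } from 'vitest';

// Hypothetical mock of the store getters consumed by getLanIp()
vi.mock('@app/store/index.js', () => ({
    getters: {
        emhttp: vi.fn().mockReturnValue({
            networks: [], // no interface entries, so the first branch misses
            nginx: { lanIp: '192.168.1.10' },
        }),
    },
}));

import { getLanIp } from '@app/core/utils/network.js';

describe('getLanIp', () => {
    it('falls back to the nginx lanIp when networks is empty', () => {
        expect(getLanIp()).toBe('192.168.1.10');
    });
});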
@@ -111,5 +111,10 @@ export const PATHS_CONFIG_MODULES =
export const PATHS_LOCAL_SESSION_FILE =
    process.env.PATHS_LOCAL_SESSION_FILE ?? '/var/run/unraid-api/local-session';

+export const PATHS_DOCKER_TEMPLATES = process.env.PATHS_DOCKER_TEMPLATES?.split(',') ?? [
+    '/boot/config/plugins/dockerMan/templates-user',
+    '/boot/config/plugins/dockerMan/templates',
+];
+
/** feature flag for the upcoming docker release */
export const ENABLE_NEXT_DOCKER_RELEASE = process.env.ENABLE_NEXT_DOCKER_RELEASE === 'true';
106 api/src/index.ts
@@ -15,6 +15,8 @@ import { WebSocket } from 'ws';

import { logger } from '@app/core/log.js';
import { fileExistsSync } from '@app/core/utils/files/file-exists.js';
+import { TimeoutBudget } from '@app/core/utils/misc/timeout-budget.js';
+import { withTimeout } from '@app/core/utils/misc/with-timeout.js';
import { getServerIdentifier } from '@app/core/utils/server-identifier.js';
import { environment, PATHS_CONFIG_MODULES, PORT } from '@app/environment.js';
import * as envVars from '@app/environment.js';
@@ -28,13 +30,23 @@ import { StateManager } from '@app/store/watch/state-watch.js';

let server: NestFastifyApplication<RawServerDefault> | null = null;

+// Total startup budget; sized so our own timeouts fire before PM2's listen_timeout
+// (ecosystem.config.json) kills the process
+const TOTAL_STARTUP_BUDGET_MS = 30_000;
+// Reserve time for the NestJS bootstrap (the most critical and time-consuming operation)
+const BOOTSTRAP_RESERVED_MS = 20_000;
+// Maximum time for any single pre-bootstrap operation
+const MAX_OPERATION_TIMEOUT_MS = 5_000;
+
const unlinkUnixPort = () => {
    if (isNaN(parseInt(PORT, 10))) {
        if (fileExistsSync(PORT)) unlinkSync(PORT);
    }
};

-export const viteNodeApp = async () => {
+export const viteNodeApp = async (): Promise<NestFastifyApplication<RawServerDefault>> => {
+    const budget = new TimeoutBudget(TOTAL_STARTUP_BUDGET_MS);
+
    try {
        await import('json-bigint-patch');
        environment.IS_MAIN_PROCESS = true;
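To make the arithmetic behind those constants explicit: the bootstrap reserve leaves a fixed slice for all pre-bootstrap work, and each early operation is additionally capped. A quick worked sketch using the values above:

const preBootstrapPool = 30_000 - 20_000; // 10_000ms shared by all pre-bootstrap steps
const worstCaseOps = preBootstrapPool / 5_000; // 2 operations can hit their full 5s cap
// After that, getTimeout() starts handing out shrinking (but never sub-100ms) slices,
// and the NestJS bootstrap is still guaranteed its 20s reserve.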
@@ -42,15 +54,15 @@ export const viteNodeApp = async () => {
        /**------------------------------------------------------------------------
         * Attaching getServerIdentifier to globalThis
         *
         * getServerIdentifier is tightly coupled to the deprecated redux store,
         * which we don't want to share with other packages or plugins.
         *
         * At the same time, we need to use it in @unraid/shared as a building block,
         * where it's used & available outside of NestJS's DI context.
         *
         * Attaching to globalThis is a temporary solution to avoid refactoring
         * config sync & management outside of NestJS's DI context.
         *
         * Plugin authors should import getServerIdentifier from @unraid/shared instead,
         * to avoid breaking changes to their code.
         *------------------------------------------------------------------------**/
@@ -58,7 +70,18 @@ export const viteNodeApp = async () => {
        logger.info('ENV %o', envVars);
        logger.info('PATHS %o', store.getState().paths);

-        await mkdir(PATHS_CONFIG_MODULES, { recursive: true });
+        // Note: we use logger.info for checkpoints instead of a lower log level
+        // to ensure emission during an unraid server's boot,
+        // where the log level will be set to INFO by default.
+
+        // Create config directory
+        try {
+            await mkdir(PATHS_CONFIG_MODULES, { recursive: true });
+            logger.info('Config directory ready');
+        } catch (error) {
+            logger.error(error, 'Failed to create config directory');
+            throw error;
+        }

        const cacheable = new CacheableLookup();
@@ -68,29 +91,73 @@ export const viteNodeApp = async () => {
        cacheable.install(https.globalAgent);

        // Load emhttp state into store
-        await store.dispatch(loadStateFiles());
+        try {
+            const timeout = budget.getTimeout(MAX_OPERATION_TIMEOUT_MS, BOOTSTRAP_RESERVED_MS);
+            await withTimeout(store.dispatch(loadStateFiles()), timeout, 'loadStateFiles');
+            logger.info('Emhttp state loaded');
+        } catch (error) {
+            logger.error(error, 'Failed to load emhttp state files');
+            logger.warn('Continuing with default state');
+        }

        // Load initial registration key into store
-        await store.dispatch(loadRegistrationKey());
+        try {
+            const timeout = budget.getTimeout(MAX_OPERATION_TIMEOUT_MS, BOOTSTRAP_RESERVED_MS);
+            await withTimeout(store.dispatch(loadRegistrationKey()), timeout, 'loadRegistrationKey');
+            logger.info('Registration key loaded');
+        } catch (error) {
+            logger.error(error, 'Failed to load registration key');
+            logger.warn('Continuing without registration key');
+        }

        // Load my dynamix config file into store
-        loadDynamixConfig();
+        try {
+            loadDynamixConfig();
+            logger.info('Dynamix config loaded');
+        } catch (error) {
+            logger.error(error, 'Failed to load dynamix config');
+            logger.warn('Continuing with default dynamix config');
+        }

        // Start listening to file updates
-        StateManager.getInstance();
+        try {
+            StateManager.getInstance();
+            logger.info('State manager initialized');
+        } catch (error) {
+            logger.error(error, 'Failed to initialize state manager');
+            logger.warn('Continuing without state watching');
+        }

        // Start listening to key file changes
-        setupRegistrationKeyWatch();
+        try {
+            setupRegistrationKeyWatch();
+            logger.info('Registration key watch active');
+        } catch (error) {
+            logger.error(error, 'Failed to setup registration key watch');
+            logger.warn('Continuing without key file watching');
+        }

        // If port is unix socket, delete old socket before starting http server
        unlinkUnixPort();

        startMiddlewareListeners();

-        // Start webserver
-        const { bootstrapNestServer } = await import('@app/unraid-api/main.js');
-        server = await bootstrapNestServer();
+        // Start webserver - use all remaining budget
+        try {
+            const bootstrapTimeout = budget.remaining();
+            if (bootstrapTimeout < 1000) {
+                logger.warn(
+                    `Insufficient startup budget remaining (${bootstrapTimeout}ms) for NestJS bootstrap`
+                );
+            }
+            logger.info('Bootstrapping NestJS server (budget: %dms)...', bootstrapTimeout);
+            const { bootstrapNestServer } = await import('@app/unraid-api/main.js');
+            server = await withTimeout(bootstrapNestServer(), bootstrapTimeout, 'bootstrapNestServer');
+            logger.info('Startup complete in %dms', budget.elapsed());
+        } catch (error) {
+            logger.error(error, 'Failed to start NestJS server');
+            throw error; // This is critical - must rethrow to trigger graceful exit
+        }

        asyncExitHook(
            async (signal) => {
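Each non-critical step above repeats the same try/log/continue shape. If the list keeps growing, the pattern could be factored into a helper along these lines; this is a sketch under the same imports, not something the diff adds:

// Hypothetical helper: run a non-critical startup step, time-boxed by the budget,
// logging the outcome but never aborting startup on failure.
const runStep = async (
    name: string,
    step: () => Promise<unknown> | unknown,
    budget: TimeoutBudget
): Promise<void> => {
    try {
        const timeout = budget.getTimeout(MAX_OPERATION_TIMEOUT_MS, BOOTSTRAP_RESERVED_MS);
        await withTimeout(Promise.resolve(step()), timeout, name);
        logger.info('%s complete', name);
    } catch (error) {
        logger.error(error, `${name} failed`);
        logger.warn('Continuing startup without %s', name);
    }
};

// Usage, mirroring the sequence above:
// await runStep('loadStateFiles', () => store.dispatch(loadStateFiles()), budget);
// await runStep('loadRegistrationKey', () => store.dispatch(loadRegistrationKey()), budget);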
@@ -103,8 +170,10 @@ export const viteNodeApp = async () => {

                gracefulExit();
            },
-            { wait: 9999 }
+            { wait: 10_000 }
        );
+
+        return server;
    } catch (error: unknown) {
        if (error instanceof Error) {
            logger.error(error, 'API-ERROR');
@@ -115,8 +184,9 @@ export const viteNodeApp = async () => {
            await server?.close?.();
        }
        shutdownApiEvent();
-        // Kill application
+        // Kill application - gracefulExit calls process.exit but TS doesn't know it never returns
        gracefulExit(1);
+        throw new Error('Unreachable');
    }
};
@@ -20,6 +20,7 @@ const initialState = {
        process.env.PATHS_UNRAID_DATA ?? ('/boot/config/plugins/dynamix.my.servers/data/' as const)
    ),
    'docker-autostart': '/var/lib/docker/unraid-autostart' as const,
+    'docker-userprefs': '/boot/config/plugins/dockerMan/userprefs.cfg' as const,
    'docker-socket': '/var/run/docker.sock' as const,
    'rclone-socket': resolvePath(process.env.PATHS_RCLONE_SOCKET ?? ('/var/run/rclone.socket' as const)),
    'parity-checks': resolvePath(
@@ -1,17 +1,51 @@
import { watch } from 'chokidar';

-import { CHOKIDAR_USEPOLLING } from '@app/environment.js';
-import { store } from '@app/store/index.js';
+import { keyServerLogger } from '@app/core/log.js';
+import { getters, store } from '@app/store/index.js';
+import { loadSingleStateFile } from '@app/store/modules/emhttp.js';
import { loadRegistrationKey } from '@app/store/modules/registration.js';
+import { StateFileKey } from '@app/store/types.js';
+
+/**
+ * Reloads var.ini with retry logic to handle timing issues with emhttpd.
+ * When a key file changes, emhttpd needs time to process it and update var.ini.
+ * This function retries loading var.ini until the registration state changes
+ * or max retries are exhausted.
+ */
+export const reloadVarIniWithRetry = async (maxRetries = 3): Promise<void> => {
+    const beforeState = getters.emhttp().var?.regTy;
+
+    for (let attempt = 0; attempt < maxRetries; attempt++) {
+        const delay = 500 * Math.pow(2, attempt); // 500ms, 1s, 2s
+        await new Promise((resolve) => setTimeout(resolve, delay));
+
+        await store.dispatch(loadSingleStateFile(StateFileKey.var));
+
+        const afterState = getters.emhttp().var?.regTy;
+        if (beforeState !== afterState) {
+            keyServerLogger.info('Registration state updated: %s -> %s', beforeState, afterState);
+            return;
+        }
+        keyServerLogger.debug('Retry %d: var.ini regTy still %s', attempt + 1, afterState);
+    }
+    keyServerLogger.debug('var.ini regTy unchanged after %d retries (may be expected)', maxRetries);
+};

export const setupRegistrationKeyWatch = () => {
+    // IMPORTANT: /boot/config is on FAT32 flash drive which does NOT support inotify
+    // Must use polling to detect file changes on FAT32 filesystems
    watch('/boot/config', {
        persistent: true,
        ignoreInitial: true,
        ignored: (path: string) => !path.endsWith('.key'),
-        usePolling: CHOKIDAR_USEPOLLING === true,
-    }).on('all', async () => {
-        // Load updated key into store
+        usePolling: true, // Required for FAT32 - inotify doesn't work
+        interval: 5000, // Poll every 5 seconds (balance between responsiveness and CPU usage)
+    }).on('all', async (event, path) => {
+        keyServerLogger.info('Key file %s: %s', event, path);
+
        await store.dispatch(loadRegistrationKey());
+
+        // Reload var.ini to get updated registration metadata from emhttpd
+        await reloadVarIniWithRetry();
    });
};
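The retry delays in reloadVarIniWithRetry double each attempt, so the default maxRetries = 3 waits at most 500 + 1000 + 2000 = 3500ms for emhttpd to rewrite var.ini. A one-liner confirming the schedule:

const delays = Array.from({ length: 3 }, (_, attempt) => 500 * Math.pow(2, attempt));
// delays === [500, 1000, 2000] -> 3500ms worst case before giving up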
@@ -6,102 +6,60 @@ import { AuthZGuard } from 'nest-authz';
import request from 'supertest';
import { afterAll, beforeAll, describe, expect, it, vi } from 'vitest';

-import { loadDynamixConfig, store } from '@app/store/index.js';
-import { loadStateFiles } from '@app/store/modules/emhttp.js';
import { AppModule } from '@app/unraid-api/app/app.module.js';
import { AuthService } from '@app/unraid-api/auth/auth.service.js';
import { AuthenticationGuard } from '@app/unraid-api/auth/authentication.guard.js';
-import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';

-// Mock external system boundaries that we can't control in tests
-vi.mock('dockerode', () => {
-    return {
-        default: vi.fn().mockImplementation(() => ({
-            listContainers: vi.fn().mockResolvedValue([
-                {
-                    Id: 'test-container-1',
-                    Names: ['/test-container'],
-                    State: 'running',
-                    Status: 'Up 5 minutes',
-                    Image: 'test:latest',
-                    Command: 'node server.js',
-                    Created: Date.now() / 1000,
-                    Ports: [
-                        {
-                            IP: '0.0.0.0',
-                            PrivatePort: 3000,
-                            PublicPort: 3000,
-                            Type: 'tcp',
-                        },
-                    ],
-                    Labels: {},
-                    HostConfig: {
-                        NetworkMode: 'bridge',
-                    },
-                    NetworkSettings: {
-                        Networks: {},
-                    },
-                    Mounts: [],
-                },
-            ]),
-            getContainer: vi.fn().mockImplementation((id) => ({
-                inspect: vi.fn().mockResolvedValue({
-                    Id: id,
-                    Name: '/test-container',
-                    State: { Running: true },
-                    Config: { Image: 'test:latest' },
-                }),
-            })),
-            listImages: vi.fn().mockResolvedValue([]),
-            listNetworks: vi.fn().mockResolvedValue([]),
-            listVolumes: vi.fn().mockResolvedValue({ Volumes: [] }),
-        })),
-    };
-});
-
-// Mock external command execution
-vi.mock('execa', () => ({
-    execa: vi.fn().mockImplementation((cmd) => {
-        if (cmd === 'whoami') {
-            return Promise.resolve({ stdout: 'testuser' });
-        }
-        return Promise.resolve({ stdout: 'mocked output' });
-    }),
-}));
-
-// Mock child_process for services that spawn processes
-vi.mock('node:child_process', () => ({
-    spawn: vi.fn(() => ({
-        on: vi.fn(),
-        kill: vi.fn(),
-        stdout: { on: vi.fn() },
-        stderr: { on: vi.fn() },
-    })),
-}));
-
-// Mock file system operations that would fail in test environment
-vi.mock('node:fs/promises', async (importOriginal) => {
-    const actual = await importOriginal<typeof import('fs/promises')>();
-    return {
-        ...actual,
-        readFile: vi.fn().mockResolvedValue(''),
-        writeFile: vi.fn().mockResolvedValue(undefined),
-        mkdir: vi.fn().mockResolvedValue(undefined),
-        access: vi.fn().mockResolvedValue(undefined),
-        stat: vi.fn().mockResolvedValue({ isFile: () => true }),
-        readdir: vi.fn().mockResolvedValue([]),
-        rename: vi.fn().mockResolvedValue(undefined),
-        unlink: vi.fn().mockResolvedValue(undefined),
-    };
-});
-
-// Mock fs module for synchronous operations
-vi.mock('node:fs', () => ({
-    existsSync: vi.fn().mockReturnValue(false),
-    readFileSync: vi.fn().mockReturnValue(''),
-    writeFileSync: vi.fn(),
-    mkdirSync: vi.fn(),
-    readdirSync: vi.fn().mockReturnValue([]),
-}));
+// Mock the store before importing it
+vi.mock('@app/store/index.js', () => ({
+    store: {
+        dispatch: vi.fn().mockResolvedValue(undefined),
+        subscribe: vi.fn().mockImplementation(() => vi.fn()),
+        getState: vi.fn().mockReturnValue({
+            emhttp: {
+                var: {
+                    csrfToken: 'test-csrf-token',
+                },
+            },
+            docker: {
+                containers: [],
+                autostart: [],
+            },
+        }),
+        unsubscribe: vi.fn(),
+    },
+    getters: {
+        emhttp: vi.fn().mockReturnValue({
+            var: {
+                csrfToken: 'test-csrf-token',
+            },
+        }),
+        docker: vi.fn().mockReturnValue({
+            containers: [],
+            autostart: [],
+        }),
+        paths: vi.fn().mockReturnValue({
+            'docker-autostart': '/tmp/docker-autostart',
+            'docker-socket': '/var/run/docker.sock',
+            'var-run': '/var/run',
+            'auth-keys': '/tmp/auth-keys',
+            activationBase: '/tmp/activation',
+            'dynamix-config': ['/tmp/dynamix-config', '/tmp/dynamix-config'],
+            identConfig: '/tmp/ident.cfg',
+        }),
+        dynamix: vi.fn().mockReturnValue({
+            notify: {
+                path: '/tmp/notifications',
+            },
+        }),
+    },
+    loadDynamixConfig: vi.fn(),
+    loadStateFiles: vi.fn().mockResolvedValue(undefined),
+}));
+
+// Mock fs-extra for directory operations
+vi.mock('fs-extra', () => ({
+    ensureDirSync: vi.fn().mockReturnValue(undefined),
+}));

describe('AppModule Integration Tests', () => {
@@ -109,14 +67,6 @@ describe('AppModule Integration Tests', () => {
    let moduleRef: TestingModule;

    beforeAll(async () => {
-        // Initialize the dynamix config and state files before creating the module
-        await store.dispatch(loadStateFiles());
-        loadDynamixConfig();
-
-        // Debug: Log the CSRF token from the store
-        const { getters } = await import('@app/store/index.js');
-        console.log('CSRF Token from store:', getters.emhttp().var.csrfToken);
-
        moduleRef = await Test.createTestingModule({
            imports: [AppModule],
        })
@@ -149,14 +99,6 @@ describe('AppModule Integration Tests', () => {
                roles: ['admin'],
            }),
        })
-        // Override Redis client
-        .overrideProvider('REDIS_CLIENT')
-        .useValue({
-            get: vi.fn(),
-            set: vi.fn(),
-            del: vi.fn(),
-            connect: vi.fn(),
-        })
        .compile();

    app = moduleRef.createNestApplication<NestFastifyApplication>(new FastifyAdapter());
@@ -177,9 +119,9 @@ describe('AppModule Integration Tests', () => {
    });

    it('should resolve core services', () => {
-        const dockerService = moduleRef.get(DockerService);
+        const authService = moduleRef.get(AuthService);

-        expect(dockerService).toBeDefined();
+        expect(authService).toBeDefined();
    });
});
@@ -238,18 +180,12 @@ describe('AppModule Integration Tests', () => {
    });

    describe('Service Integration', () => {
-        it('should have working service-to-service communication', async () => {
-            const dockerService = moduleRef.get(DockerService);
-
-            // Test that the service can be called and returns expected data structure
-            const containers = await dockerService.getContainers();
-
-            expect(containers).toBeInstanceOf(Array);
-            // The containers might be empty or cached, just verify structure
-            if (containers.length > 0) {
-                expect(containers[0]).toHaveProperty('id');
-                expect(containers[0]).toHaveProperty('names');
-            }
+        it('should have working service-to-service communication', () => {
+            // Test that the module can resolve its services without errors
+            // This validates that dependency injection is working correctly
+            const authService = moduleRef.get(AuthService);
+            expect(authService).toBeDefined();
+            expect(typeof authService.validateCookiesWithCsrfToken).toBe('function');
        });
    });
});
@@ -1,4 +1,5 @@
import { CacheModule } from '@nestjs/cache-manager';
+import { ConfigModule } from '@nestjs/config';
import { Test } from '@nestjs/testing';

import { describe, expect, it } from 'vitest';
@@ -10,7 +11,11 @@ describe('Module Dependencies Integration', () => {
    let module;
    try {
        module = await Test.createTestingModule({
-            imports: [CacheModule.register({ isGlobal: true }), RestModule],
+            imports: [
+                ConfigModule.forRoot({ ignoreEnvFile: true, isGlobal: true }),
+                CacheModule.register({ isGlobal: true }),
+                RestModule,
+            ],
        }).compile();

        expect(module).toBeDefined();
@@ -183,6 +183,11 @@ export class ApiKeyService implements OnModuleInit {

    async loadAllFromDisk(): Promise<ApiKey[]> {
        const files = await readdir(this.basePath).catch((error) => {
+            if (error.code === 'ENOENT') {
+                // Directory doesn't exist, which means no API keys have been created yet
+                this.logger.error(`API key directory does not exist: ${this.basePath}`);
+                return [];
+            }
            this.logger.error(`Failed to read API key directory: ${error}`);
            throw new Error('Failed to list API keys');
        });
@@ -525,6 +525,7 @@ export enum ContainerPortType {

export enum ContainerState {
    EXITED = 'EXITED',
+    PAUSED = 'PAUSED',
    RUNNING = 'RUNNING'
}
@@ -678,11 +679,20 @@ export enum DiskSmartStatus {

export type Docker = Node & {
    __typename?: 'Docker';
+    container?: Maybe<DockerContainer>;
    containerUpdateStatuses: Array<ExplicitStatusItem>;
    containers: Array<DockerContainer>;
    id: Scalars['PrefixedID']['output'];
+    /** Access container logs. Requires specifying a target container id through resolver arguments. */
+    logs: DockerContainerLogs;
    networks: Array<DockerNetwork>;
    organizer: ResolvedOrganizerV1;
+    portConflicts: DockerPortConflicts;
};
+
+
+export type DockerContainerArgs = {
+    id: Scalars['PrefixedID']['input'];
+};
@@ -691,38 +701,169 @@ export type DockerContainersArgs = {
};


+export type DockerLogsArgs = {
+    id: Scalars['PrefixedID']['input'];
+    since?: InputMaybe<Scalars['DateTime']['input']>;
+    tail?: InputMaybe<Scalars['Int']['input']>;
+};
+
+
export type DockerNetworksArgs = {
    skipCache?: Scalars['Boolean']['input'];
};


+export type DockerOrganizerArgs = {
+    skipCache?: Scalars['Boolean']['input'];
+};
+
+
+export type DockerPortConflictsArgs = {
+    skipCache?: Scalars['Boolean']['input'];
+};
+
+export type DockerAutostartEntryInput = {
+    /** Whether the container should auto-start */
+    autoStart: Scalars['Boolean']['input'];
+    /** Docker container identifier */
+    id: Scalars['PrefixedID']['input'];
+    /** Number of seconds to wait after starting the container */
+    wait?: InputMaybe<Scalars['Int']['input']>;
+};
+
export type DockerContainer = Node & {
    __typename?: 'DockerContainer';
    autoStart: Scalars['Boolean']['output'];
+    /** Zero-based order in the auto-start list */
+    autoStartOrder?: Maybe<Scalars['Int']['output']>;
+    /** Wait time in seconds applied after start */
+    autoStartWait?: Maybe<Scalars['Int']['output']>;
    command: Scalars['String']['output'];
    created: Scalars['Int']['output'];
    hostConfig?: Maybe<ContainerHostConfig>;
+    /** Icon URL */
+    iconUrl?: Maybe<Scalars['String']['output']>;
    id: Scalars['PrefixedID']['output'];
    image: Scalars['String']['output'];
    imageId: Scalars['String']['output'];
+    /** Whether the container is orphaned (no template found) */
+    isOrphaned: Scalars['Boolean']['output'];
    isRebuildReady?: Maybe<Scalars['Boolean']['output']>;
    isUpdateAvailable?: Maybe<Scalars['Boolean']['output']>;
    labels?: Maybe<Scalars['JSON']['output']>;
+    /** List of LAN-accessible host:port values */
+    lanIpPorts?: Maybe<Array<Scalars['String']['output']>>;
    mounts?: Maybe<Array<Scalars['JSON']['output']>>;
    names: Array<Scalars['String']['output']>;
    networkSettings?: Maybe<Scalars['JSON']['output']>;
    ports: Array<ContainerPort>;
+    /** Project/Product homepage URL */
+    projectUrl?: Maybe<Scalars['String']['output']>;
+    /** Registry/Docker Hub URL */
+    registryUrl?: Maybe<Scalars['String']['output']>;
+    /** Shell to use for console access (from template) */
+    shell?: Maybe<Scalars['String']['output']>;
+    /** Size of container logs (in bytes) */
+    sizeLog?: Maybe<Scalars['BigInt']['output']>;
    /** Total size of all files in the container (in bytes) */
    sizeRootFs?: Maybe<Scalars['BigInt']['output']>;
+    /** Size of writable layer (in bytes) */
+    sizeRw?: Maybe<Scalars['BigInt']['output']>;
    state: ContainerState;
    status: Scalars['String']['output'];
+    /** Support page/thread URL */
+    supportUrl?: Maybe<Scalars['String']['output']>;
+    /** Whether Tailscale is enabled for this container */
+    tailscaleEnabled: Scalars['Boolean']['output'];
+    /** Tailscale status for this container (fetched via docker exec) */
+    tailscaleStatus?: Maybe<TailscaleStatus>;
+    templatePath?: Maybe<Scalars['String']['output']>;
+    /** Port mappings from template (used when container is not running) */
+    templatePorts?: Maybe<Array<ContainerPort>>;
+    /** Resolved WebUI URL from template */
+    webUiUrl?: Maybe<Scalars['String']['output']>;
+};
+
+
+export type DockerContainerTailscaleStatusArgs = {
+    forceRefresh?: InputMaybe<Scalars['Boolean']['input']>;
+};
+
+export type DockerContainerLogLine = {
+    __typename?: 'DockerContainerLogLine';
+    message: Scalars['String']['output'];
+    timestamp: Scalars['DateTime']['output'];
+};
+
+export type DockerContainerLogs = {
+    __typename?: 'DockerContainerLogs';
+    containerId: Scalars['PrefixedID']['output'];
+    /** Cursor that can be passed back through the since argument to continue streaming logs. */
+    cursor?: Maybe<Scalars['DateTime']['output']>;
+    lines: Array<DockerContainerLogLine>;
+};
+
+export type DockerContainerPortConflict = {
+    __typename?: 'DockerContainerPortConflict';
+    containers: Array<DockerPortConflictContainer>;
+    privatePort: Scalars['Port']['output'];
+    type: ContainerPortType;
+};
+
+export type DockerContainerStats = {
+    __typename?: 'DockerContainerStats';
+    /** Block I/O String (e.g. 100MB / 1GB) */
+    blockIO: Scalars['String']['output'];
+    /** CPU Usage Percentage */
+    cpuPercent: Scalars['Float']['output'];
+    id: Scalars['PrefixedID']['output'];
+    /** Memory Usage Percentage */
+    memPercent: Scalars['Float']['output'];
+    /** Memory Usage String (e.g. 100MB / 1GB) */
+    memUsage: Scalars['String']['output'];
+    /** Network I/O String (e.g. 100MB / 1GB) */
+    netIO: Scalars['String']['output'];
+};
+
+export type DockerLanPortConflict = {
+    __typename?: 'DockerLanPortConflict';
+    containers: Array<DockerPortConflictContainer>;
+    lanIpPort: Scalars['String']['output'];
+    publicPort?: Maybe<Scalars['Port']['output']>;
+    type: ContainerPortType;
};

export type DockerMutations = {
    __typename?: 'DockerMutations';
+    /** Pause (Suspend) a container */
+    pause: DockerContainer;
+    /** Remove a container */
+    removeContainer: Scalars['Boolean']['output'];
    /** Start a container */
    start: DockerContainer;
    /** Stop a container */
    stop: DockerContainer;
+    /** Unpause (Resume) a container */
+    unpause: DockerContainer;
+    /** Update all containers that have available updates */
+    updateAllContainers: Array<DockerContainer>;
+    /** Update auto-start configuration for Docker containers */
+    updateAutostartConfiguration: Scalars['Boolean']['output'];
+    /** Update a container to the latest image */
+    updateContainer: DockerContainer;
+    /** Update multiple containers to the latest images */
+    updateContainers: Array<DockerContainer>;
+};
+
+
+export type DockerMutationsPauseArgs = {
+    id: Scalars['PrefixedID']['input'];
+};
+
+
+export type DockerMutationsRemoveContainerArgs = {
+    id: Scalars['PrefixedID']['input'];
+    withImage?: InputMaybe<Scalars['Boolean']['input']>;
};
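The cursor field on DockerContainerLogs is designed to be fed back through the since argument, which lets a client tail logs by polling. A hedged sketch of that loop in TypeScript; the query shape follows the generated types above, but the endpoint and fetch transport are assumptions:

// Assumed GraphQL endpoint; the real client/transport is not shown in this diff.
const GRAPHQL_URL = '/graphql';

interface LogsResponse {
    data: {
        docker: {
            logs: {
                cursor: string | null;
                lines: Array<{ timestamp: string; message: string }>;
            };
        };
    };
}

async function tailContainerLogs(containerId: string): Promise<void> {
    let since: string | null = null;
    for (;;) {
        const res = await fetch(GRAPHQL_URL, {
            method: 'POST',
            headers: { 'content-type': 'application/json' },
            body: JSON.stringify({
                query: `query Logs($id: PrefixedID!, $since: DateTime) {
                    docker { logs(id: $id, since: $since) { cursor lines { timestamp message } } }
                }`,
                variables: { id: containerId, since },
            }),
        });
        const { data } = (await res.json()) as LogsResponse;
        for (const line of data.docker.logs.lines) {
            console.log(line.timestamp, line.message);
        }
        // Feed the cursor back through `since` to continue where we left off
        since = data.docker.logs.cursor ?? since;
        await new Promise((r) => setTimeout(r, 2_000));
    }
}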
@@ -735,6 +876,27 @@ export type DockerMutationsStopArgs = {
    id: Scalars['PrefixedID']['input'];
};


+export type DockerMutationsUnpauseArgs = {
+    id: Scalars['PrefixedID']['input'];
+};
+
+
+export type DockerMutationsUpdateAutostartConfigurationArgs = {
+    entries: Array<DockerAutostartEntryInput>;
+    persistUserPreferences?: InputMaybe<Scalars['Boolean']['input']>;
+};
+
+
+export type DockerMutationsUpdateContainerArgs = {
+    id: Scalars['PrefixedID']['input'];
+};
+
+
+export type DockerMutationsUpdateContainersArgs = {
+    ids: Array<Scalars['PrefixedID']['input']>;
+};
+
export type DockerNetwork = Node & {
    __typename?: 'DockerNetwork';
    attachable: Scalars['Boolean']['output'];
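For the new updateAutostartConfiguration mutation, a client sends an array of DockerAutostartEntryInput entries. A sketch of the request document, assuming DockerMutations is exposed as a docker field on Mutation (mirroring the vm and parityCheck namespaces shown elsewhere in this file):

const UPDATE_AUTOSTART = /* GraphQL */ `
    mutation UpdateAutostart($entries: [DockerAutostartEntryInput!]!, $persist: Boolean) {
        docker {
            updateAutostartConfiguration(entries: $entries, persistUserPreferences: $persist)
        }
    }
`;

// Illustrative variable values; wait is the optional post-start delay in seconds.
const variables = {
    entries: [
        { id: 'test-container-1', autoStart: true, wait: 30 },
        { id: 'test-container-2', autoStart: false },
    ],
    persist: true,
};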
@@ -754,6 +916,26 @@ export type DockerNetwork = Node & {
    scope: Scalars['String']['output'];
};

+export type DockerPortConflictContainer = {
+    __typename?: 'DockerPortConflictContainer';
+    id: Scalars['PrefixedID']['output'];
+    name: Scalars['String']['output'];
+};
+
+export type DockerPortConflicts = {
+    __typename?: 'DockerPortConflicts';
+    containerPorts: Array<DockerContainerPortConflict>;
+    lanPorts: Array<DockerLanPortConflict>;
+};
+
+export type DockerTemplateSyncResult = {
+    __typename?: 'DockerTemplateSyncResult';
+    errors: Array<Scalars['String']['output']>;
+    matched: Scalars['Int']['output'];
+    scanned: Scalars['Int']['output'];
+    skipped: Scalars['Int']['output'];
+};
+
export type DynamicRemoteAccessStatus = {
    __typename?: 'DynamicRemoteAccessStatus';
    /** The type of dynamic remote access that is enabled */
@@ -799,6 +981,20 @@ export type FlashBackupStatus = {
    status: Scalars['String']['output'];
};

+export type FlatOrganizerEntry = {
+    __typename?: 'FlatOrganizerEntry';
+    childrenIds: Array<Scalars['String']['output']>;
+    depth: Scalars['Float']['output'];
+    hasChildren: Scalars['Boolean']['output'];
+    id: Scalars['String']['output'];
+    meta?: Maybe<DockerContainer>;
+    name: Scalars['String']['output'];
+    parentId?: Maybe<Scalars['String']['output']>;
+    path: Array<Scalars['String']['output']>;
+    position: Scalars['Float']['output'];
+    type: Scalars['String']['output'];
+};
+
export type FormSchema = {
    /** The data schema for the form */
    dataSchema: Scalars['JSON']['output'];
@@ -1223,6 +1419,7 @@ export type Mutation = {
    connectSignIn: Scalars['Boolean']['output'];
    connectSignOut: Scalars['Boolean']['output'];
    createDockerFolder: ResolvedOrganizerV1;
+    createDockerFolderWithItems: ResolvedOrganizerV1;
    /** Creates a new notification record */
    createNotification: Notification;
    /** Deletes all archived notifications on server. */
@@ -1234,6 +1431,9 @@ export type Mutation = {
    /** Initiates a flash drive backup using a configured remote. */
    initiateFlashBackup: FlashBackupStatus;
    moveDockerEntriesToFolder: ResolvedOrganizerV1;
+    moveDockerItemsToPosition: ResolvedOrganizerV1;
+    /** Creates a notification if an equivalent unread notification does not already exist. */
+    notifyIfUnique?: Maybe<Notification>;
    parityCheck: ParityCheckMutations;
    rclone: RCloneMutations;
    /** Reads each notification to recompute & update the overview. */
@@ -1241,13 +1441,18 @@ export type Mutation = {
    refreshDockerDigests: Scalars['Boolean']['output'];
    /** Remove one or more plugins from the API. Returns false if restart was triggered automatically, true if manual restart is required. */
    removePlugin: Scalars['Boolean']['output'];
+    renameDockerFolder: ResolvedOrganizerV1;
+    /** Reset Docker template mappings to defaults. Use this to recover from corrupted state. */
+    resetDockerTemplateMappings: Scalars['Boolean']['output'];
    setDockerFolderChildren: ResolvedOrganizerV1;
    setupRemoteAccess: Scalars['Boolean']['output'];
+    syncDockerTemplatePaths: DockerTemplateSyncResult;
    unarchiveAll: NotificationOverview;
    unarchiveNotifications: NotificationOverview;
    /** Marks a notification as unread. */
    unreadNotification: Notification;
    updateApiSettings: ConnectSettingsValues;
+    updateDockerViewPreferences: ResolvedOrganizerV1;
    updateSettings: UpdateSettingsResponse;
    vm: VmMutations;
};
@@ -1290,6 +1495,14 @@ export type MutationCreateDockerFolderArgs = {
};


+export type MutationCreateDockerFolderWithItemsArgs = {
+    name: Scalars['String']['input'];
+    parentId?: InputMaybe<Scalars['String']['input']>;
+    position?: InputMaybe<Scalars['Float']['input']>;
+    sourceEntryIds?: InputMaybe<Array<Scalars['String']['input']>>;
+};
+
+
export type MutationCreateNotificationArgs = {
    input: NotificationData;
};
@@ -1322,11 +1535,29 @@ export type MutationMoveDockerEntriesToFolderArgs = {
};


+export type MutationMoveDockerItemsToPositionArgs = {
+    destinationFolderId: Scalars['String']['input'];
+    position: Scalars['Float']['input'];
+    sourceEntryIds: Array<Scalars['String']['input']>;
+};
+
+
+export type MutationNotifyIfUniqueArgs = {
+    input: NotificationData;
+};
+
+
export type MutationRemovePluginArgs = {
    input: PluginManagementInput;
};


+export type MutationRenameDockerFolderArgs = {
+    folderId: Scalars['String']['input'];
+    newName: Scalars['String']['input'];
+};
+
+
export type MutationSetDockerFolderChildrenArgs = {
    childrenIds: Array<Scalars['String']['input']>;
    folderId?: InputMaybe<Scalars['String']['input']>;
@@ -1358,6 +1589,12 @@ export type MutationUpdateApiSettingsArgs = {
};


+export type MutationUpdateDockerViewPreferencesArgs = {
+    prefs: Scalars['JSON']['input'];
+    viewId?: InputMaybe<Scalars['String']['input']>;
+};
+
+
export type MutationUpdateSettingsArgs = {
    input: Scalars['JSON']['input'];
};
@@ -1433,6 +1670,8 @@ export type Notifications = Node & {
    list: Array<Notification>;
    /** A cached overview of the notifications in the system & their severity. */
    overview: NotificationOverview;
+    /** Deduplicated list of unread warning and alert notifications, sorted latest first. */
+    warningsAndAlerts: Array<Notification>;
};
@@ -1498,22 +1737,6 @@ export type OidcSessionValidation = {
    valid: Scalars['Boolean']['output'];
};

-export type OrganizerContainerResource = {
-    __typename?: 'OrganizerContainerResource';
-    id: Scalars['String']['output'];
-    meta?: Maybe<DockerContainer>;
-    name: Scalars['String']['output'];
-    type: Scalars['String']['output'];
-};
-
-export type OrganizerResource = {
-    __typename?: 'OrganizerResource';
-    id: Scalars['String']['output'];
-    meta?: Maybe<Scalars['JSON']['output']>;
-    name: Scalars['String']['output'];
-    type: Scalars['String']['output'];
-};
-
export type Owner = {
    __typename?: 'Owner';
    avatar: Scalars['String']['output'];
@@ -1882,16 +2105,6 @@ export type RemoveRoleFromApiKeyInput = {
    role: Role;
};

-export type ResolvedOrganizerEntry = OrganizerContainerResource | OrganizerResource | ResolvedOrganizerFolder;
-
-export type ResolvedOrganizerFolder = {
-    __typename?: 'ResolvedOrganizerFolder';
-    children: Array<ResolvedOrganizerEntry>;
-    id: Scalars['String']['output'];
-    name: Scalars['String']['output'];
-    type: Scalars['String']['output'];
-};
-
export type ResolvedOrganizerV1 = {
    __typename?: 'ResolvedOrganizerV1';
    version: Scalars['Float']['output'];
@@ -1900,10 +2113,11 @@ export type ResolvedOrganizerV1 = {

export type ResolvedOrganizerView = {
    __typename?: 'ResolvedOrganizerView';
+    flatEntries: Array<FlatOrganizerEntry>;
    id: Scalars['String']['output'];
    name: Scalars['String']['output'];
    prefs?: Maybe<Scalars['JSON']['output']>;
-    root: ResolvedOrganizerEntry;
+    rootId: Scalars['String']['output'];
};

/** Available resources for permissions */
@@ -2046,9 +2260,11 @@ export type SsoSettings = Node & {
|
|||||||
export type Subscription = {
|
export type Subscription = {
|
||||||
__typename?: 'Subscription';
|
__typename?: 'Subscription';
|
||||||
arraySubscription: UnraidArray;
|
arraySubscription: UnraidArray;
|
||||||
|
dockerContainerStats: DockerContainerStats;
|
||||||
logFile: LogFileContent;
|
logFile: LogFileContent;
|
||||||
notificationAdded: Notification;
|
notificationAdded: Notification;
|
||||||
notificationsOverview: NotificationOverview;
|
notificationsOverview: NotificationOverview;
|
||||||
|
notificationsWarningsAndAlerts: Array<Notification>;
|
||||||
ownerSubscription: Owner;
|
ownerSubscription: Owner;
|
||||||
parityHistorySubscription: ParityCheck;
|
parityHistorySubscription: ParityCheck;
|
||||||
serversSubscription: Server;
|
serversSubscription: Server;
|
||||||
@@ -2062,6 +2278,56 @@ export type SubscriptionLogFileArgs = {
|
|||||||
path: Scalars['String']['input'];
|
path: Scalars['String']['input'];
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/** Tailscale exit node connection status */
|
||||||
|
export type TailscaleExitNodeStatus = {
|
||||||
|
__typename?: 'TailscaleExitNodeStatus';
|
||||||
|
/** Whether the exit node is online */
|
||||||
|
online: Scalars['Boolean']['output'];
|
||||||
|
/** Tailscale IPs of the exit node */
|
||||||
|
tailscaleIps?: Maybe<Array<Scalars['String']['output']>>;
|
||||||
|
};
|
||||||
|
|
||||||
|
/** Tailscale status for a Docker container */
|
||||||
|
export type TailscaleStatus = {
|
||||||
|
__typename?: 'TailscaleStatus';
|
||||||
|
/** Authentication URL if Tailscale needs login */
|
||||||
|
authUrl?: Maybe<Scalars['String']['output']>;
|
||||||
|
/** Tailscale backend state (Running, NeedsLogin, Stopped, etc.) */
|
||||||
|
backendState?: Maybe<Scalars['String']['output']>;
|
||||||
|
/** Actual Tailscale DNS name */
|
||||||
|
dnsName?: Maybe<Scalars['String']['output']>;
|
||||||
|
/** Status of the connected exit node (if using one) */
|
||||||
|
exitNodeStatus?: Maybe<TailscaleExitNodeStatus>;
|
||||||
|
/** Configured Tailscale hostname */
|
||||||
|
hostname?: Maybe<Scalars['String']['output']>;
|
||||||
|
/** Whether this container is an exit node */
|
||||||
|
isExitNode: Scalars['Boolean']['output'];
|
||||||
|
/** Whether the Tailscale key has expired */
|
||||||
|
keyExpired: Scalars['Boolean']['output'];
|
||||||
|
/** Tailscale key expiry date */
|
||||||
|
keyExpiry?: Maybe<Scalars['DateTime']['output']>;
|
||||||
|
/** Days until key expires */
|
||||||
|
keyExpiryDays?: Maybe<Scalars['Int']['output']>;
|
||||||
|
/** Latest available Tailscale version */
|
||||||
|
latestVersion?: Maybe<Scalars['String']['output']>;
|
||||||
|
/** Whether Tailscale is online in the container */
|
||||||
|
online: Scalars['Boolean']['output'];
|
||||||
|
/** Advertised subnet routes */
|
||||||
|
primaryRoutes?: Maybe<Array<Scalars['String']['output']>>;
|
||||||
|
/** DERP relay code */
|
||||||
|
relay?: Maybe<Scalars['String']['output']>;
|
||||||
|
/** DERP relay region name */
|
||||||
|
relayName?: Maybe<Scalars['String']['output']>;
|
||||||
|
/** Tailscale IPv4 and IPv6 addresses */
|
||||||
|
tailscaleIps?: Maybe<Array<Scalars['String']['output']>>;
|
||||||
|
/** Whether a Tailscale update is available */
|
||||||
|
updateAvailable: Scalars['Boolean']['output'];
|
||||||
|
/** Current Tailscale version */
|
||||||
|
version?: Maybe<Scalars['String']['output']>;
|
||||||
|
/** Tailscale Serve/Funnel WebUI URL */
|
||||||
|
webUiUrl?: Maybe<Scalars['String']['output']>;
|
||||||
|
};
|
||||||
|
|
||||||
/** Temperature unit */
|
/** Temperature unit */
|
||||||
export enum Temperature {
|
export enum Temperature {
|
||||||
CELSIUS = 'CELSIUS',
|
CELSIUS = 'CELSIUS',
|
||||||
|
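For orientation, a minimal sketch of consuming the generated TailscaleStatus type on a client. The helper below is hypothetical (not part of the diff); only the type shape comes from the codegen output above.

// Hypothetical helper; assumes the generated TailscaleStatus type above.
function describeTailscale(status: TailscaleStatus): string {
    if (!status.online) return 'offline';
    const version =
        status.updateAvailable && status.latestVersion
            ? `${status.version ?? '?'} (update to ${status.latestVersion} available)`
            : status.version ?? 'unknown version';
    return `${status.dnsName ?? status.hostname ?? 'unnamed node'} / ${version}`;
}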
@@ -6,6 +6,7 @@ import type { ApiConfig } from '@unraid/shared/services/api-config.js';
 import { ConfigFilePersister } from '@unraid/shared/services/config-file.js';
 import { csvStringToArray } from '@unraid/shared/util/data.js';
 
+import { isConnectPluginInstalled } from '@app/connect-plugin-cleanup.js';
 import { API_VERSION, PATHS_CONFIG_MODULES } from '@app/environment.js';
 
 export { type ApiConfig };
@@ -29,6 +30,13 @@ export const loadApiConfig = async () => {
     const apiHandler = new ApiConfigPersistence(new ConfigService()).getFileHandler();
 
     const diskConfig: Partial<ApiConfig> = await apiHandler.loadConfig();
+    // Hack: cleanup stale connect plugin entry if necessary
+    if (!isConnectPluginInstalled()) {
+        diskConfig.plugins = diskConfig.plugins?.filter(
+            (plugin) => plugin !== 'unraid-api-plugin-connect'
+        );
+        await apiHandler.writeConfigFile(diskConfig as ApiConfig);
+    }
 
     return {
         ...defaultConfig,
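The effect of the cleanup hack, illustrated. Only 'unraid-api-plugin-connect' comes from the diff; the second plugin name is made up for the example.

// With the Connect plugin uninstalled, the stale entry is stripped on load and
// the cleaned config is written back to disk:
// on disk:    { plugins: ['unraid-api-plugin-connect', 'unraid-api-plugin-foo'] }
// after load: { plugins: ['unraid-api-plugin-foo'] }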
@@ -1,9 +1,11 @@
 import { Module } from '@nestjs/common';
 
+import { CustomizationMutationsResolver } from '@app/unraid-api/graph/resolvers/customization/customization.mutations.resolver.js';
 import { CustomizationResolver } from '@app/unraid-api/graph/resolvers/customization/customization.resolver.js';
 import { CustomizationService } from '@app/unraid-api/graph/resolvers/customization/customization.service.js';
 
 @Module({
-    providers: [CustomizationService, CustomizationResolver],
+    providers: [CustomizationService, CustomizationResolver, CustomizationMutationsResolver],
+    exports: [CustomizationService],
 })
 export class CustomizationModule {}
@@ -0,0 +1,25 @@
+import { Args, ResolveField, Resolver } from '@nestjs/graphql';
+
+import { AuthAction, Resource } from '@unraid/shared/graphql.model.js';
+import { UsePermissions } from '@unraid/shared/use-permissions.directive.js';
+
+import { CustomizationService } from '@app/unraid-api/graph/resolvers/customization/customization.service.js';
+import { Theme, ThemeName } from '@app/unraid-api/graph/resolvers/customization/theme.model.js';
+import { CustomizationMutations } from '@app/unraid-api/graph/resolvers/mutation/mutation.model.js';
+
+@Resolver(() => CustomizationMutations)
+export class CustomizationMutationsResolver {
+    constructor(private readonly customizationService: CustomizationService) {}
+
+    @ResolveField(() => Theme, { description: 'Update the UI theme (writes dynamix.cfg)' })
+    @UsePermissions({
+        action: AuthAction.UPDATE_ANY,
+        resource: Resource.CUSTOMIZATIONS,
+    })
+    async setTheme(
+        @Args('theme', { type: () => ThemeName, description: 'Theme to apply' })
+        theme: ThemeName
+    ): Promise<Theme> {
+        return this.customizationService.setTheme(theme);
+    }
+}
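A rough sketch of exercising the resolver directly. The ThemeName member is assumed here (Unraid's stock themes are white/black/azure/gray, but the enum's exact spelling is not shown in this diff); in practice Nest injects CustomizationService.

// DI-free usage sketch; the theme value is an assumption.
const resolver = new CustomizationMutationsResolver(customizationService);
const theme = await resolver.setTheme('black' as ThemeName); // writes dynamix.cfg, returns the resolved Theme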
@@ -9,7 +9,9 @@ import * as ini from 'ini';
 
 import { emcmd } from '@app/core/utils/clients/emcmd.js';
 import { fileExists } from '@app/core/utils/files/file-exists.js';
+import { loadDynamixConfigFromDiskSync } from '@app/store/actions/load-dynamix-config-file.js';
 import { getters, store } from '@app/store/index.js';
+import { updateDynamixConfig } from '@app/store/modules/dynamix.js';
 import {
     ActivationCode,
     PublicPartnerInfo,
@@ -466,4 +468,16 @@ export class CustomizationService implements OnModuleInit {
             showHeaderDescription: descriptionShow === 'yes',
         };
     }
+
+    public async setTheme(theme: ThemeName): Promise<Theme> {
+        this.logger.log(`Updating theme to ${theme}`);
+        await this.updateCfgFile(this.configFile, 'display', { theme });
+
+        // Refresh in-memory store so subsequent reads get the new theme without a restart
+        const paths = getters.paths();
+        const updatedConfig = loadDynamixConfigFromDiskSync(paths['dynamix-config']);
+        store.dispatch(updateDynamixConfig(updatedConfig));
+
+        return this.getTheme();
+    }
 }
@@ -7,7 +7,7 @@ import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/dock
 import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';
 
 @Injectable()
-export class ContainerStatusJob implements OnApplicationBootstrap {
+export class ContainerStatusJob {
     private readonly logger = new Logger(ContainerStatusJob.name);
     constructor(
         private readonly dockerManifestService: DockerManifestService,
@@ -17,8 +17,10 @@ export class ContainerStatusJob {
 
     /**
      * Initialize cron job for refreshing the update status for all containers on a user-configurable schedule.
+     *
+     * Disabled for now to avoid duplication of the webgui's update notifier job (under Notification Settings).
      */
-    onApplicationBootstrap() {
+    _disabled_onApplicationBootstrap() {
         if (!this.dockerConfigService.enabled()) return;
         const cronExpression = this.dockerConfigService.getConfig().updateCheckCronSchedule;
         const cronJob = CronJob.from({
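Re-enabling the check later is just a rename back to onApplicationBootstrap; the guard and schedule lookup stay as-is. A sketch of the remaining cron wiring (the onTick body is assumed, since the diff truncates before it):

const cronJob = CronJob.from({
    cronTime: cronExpression, // user-configurable schedule from DockerConfig
    onTick: () => this.dockerManifestService.refreshDigests(), // assumed target
    start: true, // assumed; CronJob.from accepts an initial start flag
});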
@@ -0,0 +1,141 @@
+import { Test, TestingModule } from '@nestjs/testing';
+
+import { beforeEach, describe, expect, it, vi } from 'vitest';
+
+import { DockerAutostartService } from '@app/unraid-api/graph/resolvers/docker/docker-autostart.service.js';
+import { DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
+
+// Mock store getters
+const mockPaths = {
+    'docker-autostart': '/path/to/docker-autostart',
+    'docker-userprefs': '/path/to/docker-userprefs',
+};
+
+vi.mock('@app/store/index.js', () => ({
+    getters: {
+        paths: () => mockPaths,
+    },
+}));
+
+// Mock fs/promises
+const { readFileMock, writeFileMock, unlinkMock } = vi.hoisted(() => ({
+    readFileMock: vi.fn().mockResolvedValue(''),
+    writeFileMock: vi.fn().mockResolvedValue(undefined),
+    unlinkMock: vi.fn().mockResolvedValue(undefined),
+}));
+
+vi.mock('fs/promises', () => ({
+    readFile: readFileMock,
+    writeFile: writeFileMock,
+    unlink: unlinkMock,
+}));
+
+describe('DockerAutostartService', () => {
+    let service: DockerAutostartService;
+
+    beforeEach(async () => {
+        readFileMock.mockReset();
+        writeFileMock.mockReset();
+        unlinkMock.mockReset();
+        readFileMock.mockResolvedValue('');
+
+        const module: TestingModule = await Test.createTestingModule({
+            providers: [DockerAutostartService],
+        }).compile();
+
+        service = module.get<DockerAutostartService>(DockerAutostartService);
+    });
+
+    it('should be defined', () => {
+        expect(service).toBeDefined();
+    });
+
+    it('should parse autostart entries correctly', () => {
+        const content = 'container1 10\ncontainer2\ncontainer3 0';
+        const entries = service.parseAutoStartEntries(content);
+
+        expect(entries).toHaveLength(3);
+        expect(entries[0]).toEqual({ name: 'container1', wait: 10, order: 0 });
+        expect(entries[1]).toEqual({ name: 'container2', wait: 0, order: 1 });
+        expect(entries[2]).toEqual({ name: 'container3', wait: 0, order: 2 });
+    });
+
+    it('should refresh autostart entries', async () => {
+        readFileMock.mockResolvedValue('alpha 5');
+        await service.refreshAutoStartEntries();
+
+        const entry = service.getAutoStartEntry('alpha');
+        expect(entry).toBeDefined();
+        expect(entry?.wait).toBe(5);
+    });
+
+    describe('updateAutostartConfiguration', () => {
+        const mockContainers = [
+            { id: 'c1', names: ['/alpha'] },
+            { id: 'c2', names: ['/beta'] },
+        ] as DockerContainer[];
+
+        it('should update auto-start configuration and persist waits', async () => {
+            await service.updateAutostartConfiguration(
+                [
+                    { id: 'c1', autoStart: true, wait: 15 },
+                    { id: 'c2', autoStart: true, wait: 0 },
+                ],
+                mockContainers,
+                { persistUserPreferences: true }
+            );
+
+            expect(writeFileMock).toHaveBeenCalledWith(
+                mockPaths['docker-autostart'],
+                'alpha 15\nbeta\n',
+                'utf8'
+            );
+            expect(writeFileMock).toHaveBeenCalledWith(
+                mockPaths['docker-userprefs'],
+                '0="alpha"\n1="beta"\n',
+                'utf8'
+            );
+        });
+
+        it('should skip updating user preferences when persist flag is false', async () => {
+            await service.updateAutostartConfiguration(
+                [{ id: 'c1', autoStart: true, wait: 5 }],
+                mockContainers
+            );
+
+            expect(writeFileMock).toHaveBeenCalledWith(
+                mockPaths['docker-autostart'],
+                'alpha 5\n',
+                'utf8'
+            );
+            expect(writeFileMock).not.toHaveBeenCalledWith(
+                mockPaths['docker-userprefs'],
+                expect.any(String),
+                expect.any(String)
+            );
+        });
+
+        it('should remove auto-start file when no containers are configured', async () => {
+            await service.updateAutostartConfiguration(
+                [{ id: 'c1', autoStart: false, wait: 30 }],
+                mockContainers,
+                { persistUserPreferences: true }
+            );
+
+            expect(unlinkMock).toHaveBeenCalledWith(mockPaths['docker-autostart']);
+            expect(writeFileMock).toHaveBeenCalledWith(
+                mockPaths['docker-userprefs'],
+                '0="alpha"\n',
+                'utf8'
+            );
+        });
+    });
+
+    it('should sanitize autostart wait values', () => {
+        expect(service.sanitizeAutoStartWait(null)).toBe(0);
+        expect(service.sanitizeAutoStartWait(undefined)).toBe(0);
+        expect(service.sanitizeAutoStartWait(10)).toBe(10);
+        expect(service.sanitizeAutoStartWait(-5)).toBe(0);
+        expect(service.sanitizeAutoStartWait(NaN)).toBe(0);
+    });
+});
@@ -0,0 +1,175 @@
+import { Injectable, Logger } from '@nestjs/common';
+import { readFile, unlink, writeFile } from 'fs/promises';
+
+import Docker from 'dockerode';
+
+import { getters } from '@app/store/index.js';
+import {
+    DockerAutostartEntryInput,
+    DockerContainer,
+} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
+
+export interface AutoStartEntry {
+    name: string;
+    wait: number;
+    order: number;
+}
+
+@Injectable()
+export class DockerAutostartService {
+    private readonly logger = new Logger(DockerAutostartService.name);
+    private autoStartEntries: AutoStartEntry[] = [];
+    private autoStartEntryByName = new Map<string, AutoStartEntry>();
+
+    public getAutoStartEntry(name: string): AutoStartEntry | undefined {
+        return this.autoStartEntryByName.get(name);
+    }
+
+    public setAutoStartEntries(entries: AutoStartEntry[]) {
+        this.autoStartEntries = entries;
+        this.autoStartEntryByName = new Map(entries.map((entry) => [entry.name, entry]));
+    }
+
+    public parseAutoStartEntries(rawContent: string): AutoStartEntry[] {
+        const lines = rawContent
+            .split('\n')
+            .map((line) => line.trim())
+            .filter((line) => line.length > 0);
+
+        const seen = new Set<string>();
+        const entries: AutoStartEntry[] = [];
+
+        lines.forEach((line, index) => {
+            const [name, waitRaw] = line.split(/\s+/);
+            if (!name || seen.has(name)) {
+                return;
+            }
+            const parsedWait = Number.parseInt(waitRaw ?? '', 10);
+            const wait = Number.isFinite(parsedWait) && parsedWait > 0 ? parsedWait : 0;
+            entries.push({
+                name,
+                wait,
+                order: index,
+            });
+            seen.add(name);
+        });
+
+        return entries;
+    }
+
+    public async refreshAutoStartEntries(): Promise<void> {
+        const autoStartPath = getters.paths()['docker-autostart'];
+        const raw = await readFile(autoStartPath, 'utf8')
+            .then((file) => file.toString())
+            .catch(() => '');
+        const entries = this.parseAutoStartEntries(raw);
+        this.setAutoStartEntries(entries);
+    }
+
+    public sanitizeAutoStartWait(wait?: number | null): number {
+        if (wait === null || wait === undefined) return 0;
+        const coerced = Number.isInteger(wait) ? wait : Number.parseInt(String(wait), 10);
+        if (!Number.isFinite(coerced) || coerced < 0) {
+            return 0;
+        }
+        return coerced;
+    }
+
+    public getContainerPrimaryName(container: Docker.ContainerInfo | DockerContainer): string | null {
+        const names =
+            'Names' in container ? container.Names : 'names' in container ? container.names : undefined;
+        const firstName = names?.[0] ?? '';
+        return firstName ? firstName.replace(/^\//, '') : null;
+    }
+
+    private buildUserPreferenceLines(
+        entries: DockerAutostartEntryInput[],
+        containerById: Map<string, DockerContainer>
+    ): string[] {
+        const seenNames = new Set<string>();
+        const lines: string[] = [];
+
+        for (const entry of entries) {
+            const container = containerById.get(entry.id);
+            if (!container) {
+                continue;
+            }
+            const primaryName = this.getContainerPrimaryName(container);
+            if (!primaryName || seenNames.has(primaryName)) {
+                continue;
+            }
+            lines.push(`${lines.length}="${primaryName}"`);
+            seenNames.add(primaryName);
+        }
+
+        return lines;
+    }
+
+    /**
+     * Docker auto start file
+     *
+     * @note Doesn't exist if array is offline.
+     * @see https://github.com/limetech/webgui/issues/502#issue-480992547
+     */
+    public async getAutoStarts(): Promise<string[]> {
+        await this.refreshAutoStartEntries();
+        return this.autoStartEntries.map((entry) => entry.name);
+    }
+
+    public async updateAutostartConfiguration(
+        entries: DockerAutostartEntryInput[],
+        containers: DockerContainer[],
+        options?: { persistUserPreferences?: boolean }
+    ): Promise<void> {
+        const containerById = new Map(containers.map((container) => [container.id, container]));
+        const paths = getters.paths();
+        const autoStartPath = paths['docker-autostart'];
+        const userPrefsPath = paths['docker-userprefs'];
+        const persistUserPreferences = Boolean(options?.persistUserPreferences);
+
+        const lines: string[] = [];
+        const seenNames = new Set<string>();
+
+        for (const entry of entries) {
+            if (!entry.autoStart) {
+                continue;
+            }
+            const container = containerById.get(entry.id);
+            if (!container) {
+                continue;
+            }
+            const primaryName = this.getContainerPrimaryName(container);
+            if (!primaryName || seenNames.has(primaryName)) {
+                continue;
+            }
+            const wait = this.sanitizeAutoStartWait(entry.wait);
+            lines.push(wait > 0 ? `${primaryName} ${wait}` : primaryName);
+            seenNames.add(primaryName);
+        }
+
+        if (lines.length) {
+            await writeFile(autoStartPath, `${lines.join('\n')}\n`, 'utf8');
+        } else {
+            await unlink(autoStartPath)?.catch((error: NodeJS.ErrnoException) => {
+                if (error.code !== 'ENOENT') {
+                    throw error;
+                }
+            });
+        }
+
+        if (persistUserPreferences) {
+            const userPrefsLines = this.buildUserPreferenceLines(entries, containerById);
+            if (userPrefsLines.length) {
+                await writeFile(userPrefsPath, `${userPrefsLines.join('\n')}\n`, 'utf8');
+            } else {
+                await unlink(userPrefsPath)?.catch((error: NodeJS.ErrnoException) => {
+                    if (error.code !== 'ENOENT') {
+                        throw error;
+                    }
+                });
+            }
+        }
+
+        await this.refreshAutoStartEntries();
+    }
+}
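The docker-autostart file is one container per line, optionally followed by a wait in seconds. A worked example of parseAutoStartEntries on such content (container names illustrative):

const entries = service.parseAutoStartEntries('plex 30\nplex 10\nnginx');
// => [
//   { name: 'plex',  wait: 30, order: 0 },  // duplicate 'plex 10' line is skipped
//   { name: 'nginx', wait: 0,  order: 2 },  // no wait given -> 0; order is the raw line index
// ]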
@@ -1,7 +1,22 @@
 import { Field, ObjectType } from '@nestjs/graphql';
 
+import { IsArray, IsObject, IsOptional, IsString } from 'class-validator';
+import { GraphQLJSON } from 'graphql-scalars';
+
 @ObjectType()
 export class DockerConfig {
     @Field(() => String)
+    @IsString()
     updateCheckCronSchedule!: string;
+
+    @Field(() => GraphQLJSON, { nullable: true })
+    @IsOptional()
+    @IsObject()
+    templateMappings?: Record<string, string | null>;
+
+    @Field(() => [String], { nullable: true })
+    @IsOptional()
+    @IsArray()
+    @IsString({ each: true })
+    skipTemplatePaths?: string[];
 }
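An illustrative persisted DockerConfig. The cron value mirrors the default set just below; the templateMappings key/value semantics (container reference to template path) are an assumption this diff does not spell out.

const exampleConfig: DockerConfig = {
    updateCheckCronSchedule: '0 0 6 * * *', // illustrative; the default uses CronExpression.EVERY_DAY_AT_6AM
    templateMappings: { plex: '/boot/config/plugins/dockerMan/templates-user/my-plex.xml' }, // assumed semantics
    skipTemplatePaths: [],
};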
@@ -31,6 +31,8 @@ export class DockerConfigService extends ConfigFilePersister<DockerConfig> {
     defaultConfig(): DockerConfig {
         return {
             updateCheckCronSchedule: CronExpression.EVERY_DAY_AT_6AM,
+            templateMappings: {},
+            skipTemplatePaths: [],
         };
     }
 
@@ -40,6 +42,7 @@ export class DockerConfigService extends ConfigFilePersister<DockerConfig> {
         if (!cronExpression.valid) {
             throw new AppError(`Cron expression not supported: ${dockerConfig.updateCheckCronSchedule}`);
         }
+
         return dockerConfig;
     }
 }
@@ -1,18 +1,31 @@
 import { Logger } from '@nestjs/common';
-import { Mutation, Parent, ResolveField, Resolver } from '@nestjs/graphql';
+import { Args, Mutation, Parent, ResolveField, Resolver } from '@nestjs/graphql';
 
 import { Resource } from '@unraid/shared/graphql.model.js';
 import { AuthAction, UsePermissions } from '@unraid/shared/use-permissions.directive.js';
 
 import { AppError } from '@app/core/errors/app-error.js';
+import { getLanIp } from '@app/core/utils/network.js';
 import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
 import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';
-import { DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
+import { DockerTailscaleService } from '@app/unraid-api/graph/resolvers/docker/docker-tailscale.service.js';
+import { DockerTemplateScannerService } from '@app/unraid-api/graph/resolvers/docker/docker-template-scanner.service.js';
+import {
+    ContainerPort,
+    ContainerPortType,
+    ContainerState,
+    DockerContainer,
+    TailscaleStatus,
+} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
 
 @Resolver(() => DockerContainer)
 export class DockerContainerResolver {
     private readonly logger = new Logger(DockerContainerResolver.name);
-    constructor(private readonly dockerManifestService: DockerManifestService) {}
+    constructor(
+        private readonly dockerManifestService: DockerManifestService,
+        private readonly dockerTemplateScannerService: DockerTemplateScannerService,
+        private readonly dockerTailscaleService: DockerTailscaleService
+    ) {}
 
     @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
     @UsePermissions({
@@ -39,6 +52,150 @@ export class DockerContainerResolver {
         return this.dockerManifestService.isRebuildReady(container.hostConfig?.networkMode);
     }
 
+    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
+    @UsePermissions({
+        action: AuthAction.READ_ANY,
+        resource: Resource.DOCKER,
+    })
+    @ResolveField(() => String, { nullable: true })
+    public async projectUrl(@Parent() container: DockerContainer) {
+        if (!container.templatePath) return null;
+        const details = await this.dockerTemplateScannerService.getTemplateDetails(
+            container.templatePath
+        );
+        return details?.project || null;
+    }
+
+    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
+    @UsePermissions({
+        action: AuthAction.READ_ANY,
+        resource: Resource.DOCKER,
+    })
+    @ResolveField(() => String, { nullable: true })
+    public async registryUrl(@Parent() container: DockerContainer) {
+        if (!container.templatePath) return null;
+        const details = await this.dockerTemplateScannerService.getTemplateDetails(
+            container.templatePath
+        );
+        return details?.registry || null;
+    }
+
+    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
+    @UsePermissions({
+        action: AuthAction.READ_ANY,
+        resource: Resource.DOCKER,
+    })
+    @ResolveField(() => String, { nullable: true })
+    public async supportUrl(@Parent() container: DockerContainer) {
+        if (!container.templatePath) return null;
+        const details = await this.dockerTemplateScannerService.getTemplateDetails(
+            container.templatePath
+        );
+        return details?.support || null;
+    }
+
+    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
+    @UsePermissions({
+        action: AuthAction.READ_ANY,
+        resource: Resource.DOCKER,
+    })
+    @ResolveField(() => String, { nullable: true })
+    public async iconUrl(@Parent() container: DockerContainer) {
+        if (container.labels?.['net.unraid.docker.icon']) {
+            return container.labels['net.unraid.docker.icon'];
+        }
+        if (!container.templatePath) return null;
+        const details = await this.dockerTemplateScannerService.getTemplateDetails(
+            container.templatePath
+        );
+        return details?.icon || null;
+    }
+
+    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
+    @UsePermissions({
+        action: AuthAction.READ_ANY,
+        resource: Resource.DOCKER,
+    })
+    @ResolveField(() => String, { nullable: true, description: 'Shell to use for console access' })
+    public async shell(@Parent() container: DockerContainer): Promise<string | null> {
+        if (!container.templatePath) return null;
+        const details = await this.dockerTemplateScannerService.getTemplateDetails(
+            container.templatePath
+        );
+        return details?.shell || null;
+    }
+
+    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
+    @UsePermissions({
+        action: AuthAction.READ_ANY,
+        resource: Resource.DOCKER,
+    })
+    @ResolveField(() => [ContainerPort], {
+        nullable: true,
+        description: 'Port mappings from template (used when container is not running)',
+    })
+    public async templatePorts(@Parent() container: DockerContainer): Promise<ContainerPort[] | null> {
+        if (!container.templatePath) return null;
+        const details = await this.dockerTemplateScannerService.getTemplateDetails(
+            container.templatePath
+        );
+        if (!details?.ports?.length) return null;
+
+        return details.ports.map((port) => ({
+            privatePort: port.privatePort,
+            publicPort: port.publicPort,
+            type: port.type.toUpperCase() === 'UDP' ? ContainerPortType.UDP : ContainerPortType.TCP,
+        }));
+    }
+
+    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
+    @UsePermissions({
+        action: AuthAction.READ_ANY,
+        resource: Resource.DOCKER,
+    })
+    @ResolveField(() => String, {
+        nullable: true,
+        description: 'Resolved WebUI URL from template',
+    })
+    public async webUiUrl(@Parent() container: DockerContainer): Promise<string | null> {
+        if (!container.templatePath) return null;
+
+        const details = await this.dockerTemplateScannerService.getTemplateDetails(
+            container.templatePath
+        );
+
+        if (!details?.webUi) return null;
+
+        const lanIp = getLanIp();
+        if (!lanIp) return null;
+
+        let resolvedUrl = details.webUi;
+
+        // Replace [IP] placeholder with LAN IP
+        resolvedUrl = resolvedUrl.replace(/\[IP\]/g, lanIp);
+
+        // Replace [PORT:XXXX] placeholder
+        const portMatch = resolvedUrl.match(/\[PORT:(\d+)\]/);
+        if (portMatch) {
+            const templatePort = parseInt(portMatch[1], 10);
+            let resolvedPort = templatePort;
+
+            // Check if this port is mapped to a public port
+            if (container.ports) {
+                for (const port of container.ports) {
+                    if (port.privatePort === templatePort && port.publicPort) {
+                        resolvedPort = port.publicPort;
+                        break;
+                    }
+                }
+            }
+
+            resolvedUrl = resolvedUrl.replace(/\[PORT:\d+\]/g, String(resolvedPort));
+        }
+
+        return resolvedUrl;
+    }
+
     @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
     @UsePermissions({
         action: AuthAction.UPDATE_ANY,
@@ -48,4 +205,65 @@ export class DockerContainerResolver {
     public async refreshDockerDigests() {
         return this.dockerManifestService.refreshDigests();
     }
+
+    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
+    @UsePermissions({
+        action: AuthAction.READ_ANY,
+        resource: Resource.DOCKER,
+    })
+    @ResolveField(() => Boolean, { description: 'Whether Tailscale is enabled for this container' })
+    public tailscaleEnabled(@Parent() container: DockerContainer): boolean {
+        // Check for Tailscale hostname label (set when hostname is explicitly configured)
+        if (container.labels?.['net.unraid.docker.tailscale.hostname']) {
+            return true;
+        }
+
+        // Check for Tailscale hook mount - look for the source path which is an Unraid system path
+        // The hook is mounted from /usr/local/share/docker/tailscale_container_hook
+        const mounts = container.mounts ?? [];
+        return mounts.some((mount: Record<string, unknown>) => {
+            const source = (mount?.Source ?? mount?.source) as string | undefined;
+            return source?.includes('tailscale_container_hook');
+        });
+    }
+
+    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
+    @UsePermissions({
+        action: AuthAction.READ_ANY,
+        resource: Resource.DOCKER,
+    })
+    @ResolveField(() => TailscaleStatus, {
+        nullable: true,
+        description: 'Tailscale status for this container (fetched via docker exec)',
+    })
+    public async tailscaleStatus(
+        @Parent() container: DockerContainer,
+        @Args('forceRefresh', { type: () => Boolean, nullable: true, defaultValue: false })
+        forceRefresh: boolean
+    ): Promise<TailscaleStatus | null> {
+        // First check if Tailscale is enabled
+        if (!this.tailscaleEnabled(container)) {
+            return null;
+        }
+
+        const labels = container.labels ?? {};
+        const hostname = labels['net.unraid.docker.tailscale.hostname'];
+
+        if (container.state !== ContainerState.RUNNING) {
+            return {
+                online: false,
+                hostname: hostname || undefined,
+                isExitNode: false,
+                updateAvailable: false,
+                keyExpired: false,
+            };
+        }
+
+        const containerName = container.names[0];
+        if (!containerName) {
+            return null;
+        }
+
+        return this.dockerTailscaleService.getTailscaleStatus(containerName, labels, forceRefresh);
+    }
 }
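A worked, standalone sketch of the [IP]/[PORT:n] resolution in webUiUrl above (function name and values are mine; the logic mirrors the resolver):

function resolveWebUi(
    template: string,
    lanIp: string,
    ports: Array<{ privatePort?: number; publicPort?: number }>
): string {
    // Replace every [IP] placeholder with the server's LAN IP.
    let url = template.replace(/\[IP\]/g, lanIp);
    // Resolve [PORT:n] against the container's port mappings, falling back to n itself.
    const match = url.match(/\[PORT:(\d+)\]/);
    if (match) {
        const templatePort = parseInt(match[1], 10);
        const mapped =
            ports.find((p) => p.privatePort === templatePort && p.publicPort)?.publicPort ??
            templatePort;
        url = url.replace(/\[PORT:\d+\]/g, String(mapped));
    }
    return url;
}

// resolveWebUi('http://[IP]:[PORT:8080]/', '192.168.1.10', [{ privatePort: 8080, publicPort: 18080 }])
// => 'http://192.168.1.10:18080/'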
@@ -1,8 +1,6 @@
-import { Logger } from '@nestjs/common';
 import { Test, TestingModule } from '@nestjs/testing';
-import { PassThrough, Readable } from 'stream';
+import { PassThrough } from 'stream';
 
-import Docker from 'dockerode';
 import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
 
 // Import pubsub for use in tests
@@ -51,6 +49,14 @@ vi.mock('@app/core/pubsub.js', () => ({
     },
 }));
 
+// Mock the docker client utility - this is what the service actually uses
+const mockDockerClientInstance = {
+    getEvents: vi.fn(),
+};
+vi.mock('./utils/docker-client.js', () => ({
+    getDockerClient: vi.fn(() => mockDockerClientInstance),
+}));
+
 // Mock DockerService
 vi.mock('./docker.service.js', () => ({
     DockerService: vi.fn().mockImplementation(() => ({
@@ -63,20 +69,13 @@ vi.mock('./docker.service.js', () => ({
 describe('DockerEventService', () => {
     let service: DockerEventService;
     let dockerService: DockerService;
-    let mockDockerClient: Docker;
     let mockEventStream: PassThrough;
-    let mockLogger: Logger;
     let module: TestingModule;
 
     beforeEach(async () => {
-        // Create a mock Docker client
-        mockDockerClient = {
-            getEvents: vi.fn(),
-        } as unknown as Docker;
-
         // Create a mock Docker service *instance*
         const mockDockerServiceImpl = {
-            getDockerClient: vi.fn().mockReturnValue(mockDockerClient),
+            getDockerClient: vi.fn(),
             clearContainerCache: vi.fn(),
             getAppInfo: vi.fn().mockResolvedValue({ info: { apps: { installed: 1, running: 1 } } }),
         };
@@ -85,12 +84,7 @@ describe('DockerEventService', () => {
         mockEventStream = new PassThrough();
 
         // Set up the mock Docker client to return our mock event stream
-        vi.spyOn(mockDockerClient, 'getEvents').mockResolvedValue(
-            mockEventStream as unknown as Readable
-        );
-
-        // Create a mock logger
-        mockLogger = new Logger(DockerEventService.name) as Logger;
+        mockDockerClientInstance.getEvents = vi.fn().mockResolvedValue(mockEventStream);
 
         // Use the mock implementation in the testing module
         module = await Test.createTestingModule({
@@ -7,6 +7,7 @@ import Docker from 'dockerode';
 import { pubsub, PUBSUB_CHANNEL } from '@app/core/pubsub.js';
 import { getters } from '@app/store/index.js';
 import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
+import { getDockerClient } from '@app/unraid-api/graph/resolvers/docker/utils/docker-client.js';
 
 enum DockerEventAction {
     DIE = 'die',
@@ -66,7 +67,7 @@ export class DockerEventService implements OnModuleDestroy, OnModuleInit {
     ];
 
     constructor(private readonly dockerService: DockerService) {
-        this.client = this.dockerService.getDockerClient();
+        this.client = getDockerClient();
     }
 
     async onModuleInit() {
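The getDockerClient utility itself is not part of this diff; a plausible sketch of the shared-client pattern the services now rely on (implementation assumed):

import Docker from 'dockerode';

let client: Docker | undefined;

/** Assumed implementation: lazily create and reuse a single Dockerode instance. */
export function getDockerClient(): Docker {
    client ??= new Docker({ socketPath: '/var/run/docker.sock' });
    return client;
}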
@@ -0,0 +1,143 @@
+import { Test, TestingModule } from '@nestjs/testing';
+
+import { beforeEach, describe, expect, it, vi } from 'vitest';
+
+import { AppError } from '@app/core/errors/app-error.js';
+import { DockerLogService } from '@app/unraid-api/graph/resolvers/docker/docker-log.service.js';
+
+// Mock dependencies
+const mockExeca = vi.fn();
+vi.mock('execa', () => ({
+    execa: (cmd: string, args: string[]) => mockExeca(cmd, args),
+}));
+
+const { mockDockerInstance, mockGetContainer, mockContainer } = vi.hoisted(() => {
+    const mockContainer = {
+        inspect: vi.fn(),
+    };
+    const mockGetContainer = vi.fn().mockReturnValue(mockContainer);
+    const mockDockerInstance = {
+        getContainer: mockGetContainer,
+    };
+    return { mockDockerInstance, mockGetContainer, mockContainer };
+});
+
+vi.mock('@app/unraid-api/graph/resolvers/docker/utils/docker-client.js', () => ({
+    getDockerClient: vi.fn().mockReturnValue(mockDockerInstance),
+}));
+
+const { statMock } = vi.hoisted(() => ({
+    statMock: vi.fn().mockResolvedValue({ size: 0 }),
+}));
+
+vi.mock('fs/promises', () => ({
+    stat: statMock,
+}));
+
+describe('DockerLogService', () => {
+    let service: DockerLogService;
+
+    beforeEach(async () => {
+        mockExeca.mockReset();
+        mockGetContainer.mockReset();
+        mockGetContainer.mockReturnValue(mockContainer);
+        mockContainer.inspect.mockReset();
+        statMock.mockReset();
+        statMock.mockResolvedValue({ size: 0 });
+
+        const module: TestingModule = await Test.createTestingModule({
+            providers: [DockerLogService],
+        }).compile();
+
+        service = module.get<DockerLogService>(DockerLogService);
+    });
+
+    it('should be defined', () => {
+        expect(service).toBeDefined();
+    });
+
+    describe('getContainerLogSizes', () => {
+        it('should get container log sizes using dockerode inspect', async () => {
+            mockContainer.inspect.mockResolvedValue({
+                LogPath: '/var/lib/docker/containers/id/id-json.log',
+            });
+            statMock.mockResolvedValue({ size: 1024 });
+
+            const sizes = await service.getContainerLogSizes(['test-container']);
+
+            expect(mockGetContainer).toHaveBeenCalledWith('test-container');
+            expect(mockContainer.inspect).toHaveBeenCalled();
+            expect(statMock).toHaveBeenCalledWith('/var/lib/docker/containers/id/id-json.log');
+            expect(sizes.get('test-container')).toBe(1024);
+        });
+
+        it('should return 0 for missing log path', async () => {
+            mockContainer.inspect.mockResolvedValue({}); // No LogPath
+
+            const sizes = await service.getContainerLogSizes(['test-container']);
+            expect(sizes.get('test-container')).toBe(0);
+        });
+
+        it('should handle inspect errors gracefully', async () => {
+            mockContainer.inspect.mockRejectedValue(new Error('Inspect failed'));
+
+            const sizes = await service.getContainerLogSizes(['test-container']);
+            expect(sizes.get('test-container')).toBe(0);
+        });
+    });
+
+    describe('getContainerLogs', () => {
+        it('should fetch logs via docker CLI', async () => {
+            mockExeca.mockResolvedValue({ stdout: '2023-01-01T00:00:00Z Log message\n' });
+
+            const result = await service.getContainerLogs('test-id');
+
+            expect(mockExeca).toHaveBeenCalledWith('docker', [
+                'logs',
+                '--timestamps',
+                '--tail',
+                '200',
+                'test-id',
+            ]);
+            expect(result.lines).toHaveLength(1);
+            expect(result.lines[0].message).toBe('Log message');
+        });
+
+        it('should respect tail option', async () => {
+            mockExeca.mockResolvedValue({ stdout: '' });
+
+            await service.getContainerLogs('test-id', { tail: 50 });
+
+            expect(mockExeca).toHaveBeenCalledWith('docker', [
+                'logs',
+                '--timestamps',
+                '--tail',
+                '50',
+                'test-id',
+            ]);
+        });
+
+        it('should respect since option', async () => {
+            mockExeca.mockResolvedValue({ stdout: '' });
+            const since = new Date('2023-01-01T00:00:00Z');
+
+            await service.getContainerLogs('test-id', { since });
+
+            expect(mockExeca).toHaveBeenCalledWith('docker', [
+                'logs',
+                '--timestamps',
+                '--tail',
+                '200',
+                '--since',
+                since.toISOString(),
+                'test-id',
+            ]);
+        });
+
+        it('should throw AppError on execa failure', async () => {
+            mockExeca.mockRejectedValue(new Error('Docker error'));
+
+            await expect(service.getContainerLogs('test-id')).rejects.toThrow(AppError);
+        });
+    });
+});
api/src/unraid-api/graph/resolvers/docker/docker-log.service.ts (149 lines, new file)
@@ -0,0 +1,149 @@
+import { Injectable, Logger } from '@nestjs/common';
+import { stat } from 'fs/promises';
+
+import type { ExecaError } from 'execa';
+import { execa } from 'execa';
+
+import { AppError } from '@app/core/errors/app-error.js';
+import {
+    DockerContainerLogLine,
+    DockerContainerLogs,
+} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
+import { getDockerClient } from '@app/unraid-api/graph/resolvers/docker/utils/docker-client.js';
+
+@Injectable()
+export class DockerLogService {
+    private readonly logger = new Logger(DockerLogService.name);
+    private readonly client = getDockerClient();
+
+    private static readonly DEFAULT_LOG_TAIL = 200;
+    private static readonly MAX_LOG_TAIL = 2000;
+
+    public async getContainerLogSizes(containerNames: string[]): Promise<Map<string, number>> {
+        const logSizes = new Map<string, number>();
+        if (!Array.isArray(containerNames) || containerNames.length === 0) {
+            return logSizes;
+        }
+
+        for (const rawName of containerNames) {
+            const normalized = (rawName ?? '').replace(/^\//, '');
+            if (!normalized) {
+                logSizes.set(normalized, 0);
+                continue;
+            }
+
+            try {
+                const container = this.client.getContainer(normalized);
+                const info = await container.inspect();
+                const logPath = info.LogPath;
+
+                if (!logPath || typeof logPath !== 'string' || !logPath.length) {
+                    logSizes.set(normalized, 0);
+                    continue;
+                }
+
+                const stats = await stat(logPath).catch(() => null);
+                logSizes.set(normalized, stats?.size ?? 0);
+            } catch (error) {
+                const message =
+                    error instanceof Error ? error.message : String(error ?? 'unknown error');
+                this.logger.debug(
+                    `Failed to determine log size for container ${normalized}: ${message}`
+                );
+                logSizes.set(normalized, 0);
+            }
+        }
+
+        return logSizes;
+    }
+
+    public async getContainerLogs(
+        id: string,
+        options?: { since?: Date | null; tail?: number | null }
+    ): Promise<DockerContainerLogs> {
+        const normalizedId = (id ?? '').trim();
+        if (!normalizedId) {
+            throw new AppError('Container id is required to fetch logs.', 400);
+        }
+
+        const tail = this.normalizeLogTail(options?.tail);
+        const args = ['logs', '--timestamps', '--tail', String(tail)];
+        const sinceIso = options?.since instanceof Date ? options.since.toISOString() : null;
+        if (sinceIso) {
+            args.push('--since', sinceIso);
+        }
+        args.push(normalizedId);
+
+        try {
+            const { stdout } = await execa('docker', args);
+            const lines = this.parseDockerLogOutput(stdout);
+            const cursor =
+                lines.length > 0 ? lines[lines.length - 1].timestamp : (options?.since ?? null);
+
+            return {
+                containerId: normalizedId,
+                lines,
+                cursor: cursor ?? undefined,
+            };
+        } catch (error: unknown) {
+            const execaError = error as ExecaError;
+            const stderr = typeof execaError?.stderr === 'string' ? execaError.stderr.trim() : '';
+            const message = stderr || execaError?.message || 'Unknown error';
+            this.logger.error(
+                `Failed to fetch logs for container ${normalizedId}: ${message}`,
+                execaError
+            );
+            throw new AppError(`Failed to fetch logs for container ${normalizedId}.`);
+        }
+    }
+
+    private normalizeLogTail(tail?: number | null): number {
+        if (typeof tail !== 'number' || Number.isNaN(tail)) {
+            return DockerLogService.DEFAULT_LOG_TAIL;
+        }
+        const coerced = Math.floor(tail);
+        if (!Number.isFinite(coerced) || coerced <= 0) {
+            return DockerLogService.DEFAULT_LOG_TAIL;
+        }
+        return Math.min(coerced, DockerLogService.MAX_LOG_TAIL);
+    }
+
+    private parseDockerLogOutput(output: string): DockerContainerLogLine[] {
+        if (!output) {
+            return [];
+        }
+        return output
+            .split(/\r?\n/g)
+            .map((line) => line.trim())
+            .filter((line) => line.length > 0)
+            .map((line) => this.parseDockerLogLine(line))
+            .filter((entry): entry is DockerContainerLogLine => Boolean(entry));
+    }
+
+    private parseDockerLogLine(line: string): DockerContainerLogLine | null {
+        const trimmed = line.trim();
+        if (!trimmed.length) {
+            return null;
+        }
+        const firstSpaceIndex = trimmed.indexOf(' ');
+        if (firstSpaceIndex === -1) {
+            return {
+                timestamp: new Date(),
+                message: trimmed,
+            };
+        }
+        const potentialTimestamp = trimmed.slice(0, firstSpaceIndex);
+        const message = trimmed.slice(firstSpaceIndex + 1);
+        const parsedTimestamp = new Date(potentialTimestamp);
+        if (Number.isNaN(parsedTimestamp.getTime())) {
+            return {
+                timestamp: new Date(),
+                message: trimmed,
+            };
+        }
+        return {
+            timestamp: parsedTimestamp,
+            message,
+        };
+    }
+}
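How parseDockerLogLine splits `docker logs --timestamps` output, worked through:

// '--timestamps' prefixes each line with an RFC 3339 timestamp, so the service
// splits on the first space:
// '2023-01-01T00:00:00Z Log message'
//   => { timestamp: new Date('2023-01-01T00:00:00Z'), message: 'Log message' }
// A line whose first token is not a parseable date keeps the whole line as the
// message and falls back to the current time.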
@@ -16,6 +16,14 @@ export class DockerManifestService {
         return this.dockerPhpService.refreshDigestsViaPhp();
     });
 
+    /**
+     * Reads the cached update status file and returns the parsed contents.
+     * Exposed so other services can reuse the parsed data when evaluating many containers.
+     */
+    async getCachedUpdateStatuses(): Promise<Record<string, CachedStatusEntry>> {
+        return this.dockerPhpService.readCachedUpdateStatus();
+    }
+
     /**
      * Recomputes local/remote docker container digests and writes them to /var/lib/docker/unraid-update-status.json
      * @param mutex - Optional mutex to use for the operation. If not provided, a default mutex will be used.
@@ -41,7 +49,22 @@ export class DockerManifestService {
         cacheData ??= await this.dockerPhpService.readCachedUpdateStatus();
         const containerData = cacheData[taggedRef];
         if (!containerData) return null;
-        return containerData.status?.toLowerCase() === 'true';
+
+        const normalize = (digest?: string | null) => {
+            const value = digest?.trim().toLowerCase();
+            return value && value !== 'undef' ? value : null;
+        };
+
+        const localDigest = normalize(containerData.local);
+        const remoteDigest = normalize(containerData.remote);
+        if (localDigest && remoteDigest) {
+            return localDigest !== remoteDigest;
+        }
+
+        const status = containerData.status?.toLowerCase();
+        if (status === 'true') return true;
+        if (status === 'false') return false;
+        return null;
     }
 
     /**
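The new update check prefers comparing digests and only falls back to the cached status string; worked through (digest values illustrative):

// local 'sha256:aaa', remote 'sha256:bbb'            => true  (update available)
// local 'sha256:aaa', remote 'sha256:aaa'            => false (up to date)
// local 'undef', remote 'sha256:bbb', status 'true'  => true  (digests unusable, cached status wins)
// no usable digests, status neither 'true'/'false'   => null  (unknown)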
@@ -0,0 +1,89 @@
+import { CACHE_MANAGER } from '@nestjs/cache-manager';
+import { Test, TestingModule } from '@nestjs/testing';
+
+import { beforeEach, describe, expect, it, vi } from 'vitest';
+
+import { DockerNetworkService } from '@app/unraid-api/graph/resolvers/docker/docker-network.service.js';
+
+const { mockDockerInstance, mockListNetworks } = vi.hoisted(() => {
+    const mockListNetworks = vi.fn();
+    const mockDockerInstance = {
+        listNetworks: mockListNetworks,
+    };
+    return { mockDockerInstance, mockListNetworks };
+});
+
+vi.mock('@app/unraid-api/graph/resolvers/docker/utils/docker-client.js', () => ({
+    getDockerClient: vi.fn().mockReturnValue(mockDockerInstance),
+}));
+
+const mockCacheManager = {
+    get: vi.fn(),
+    set: vi.fn(),
+};
+
+describe('DockerNetworkService', () => {
+    let service: DockerNetworkService;
+
+    beforeEach(async () => {
+        mockListNetworks.mockReset();
+        mockCacheManager.get.mockReset();
+        mockCacheManager.set.mockReset();
+
+        const module: TestingModule = await Test.createTestingModule({
+            providers: [
+                DockerNetworkService,
+                {
+                    provide: CACHE_MANAGER,
+                    useValue: mockCacheManager,
+                },
+            ],
+        }).compile();
+
+        service = module.get<DockerNetworkService>(DockerNetworkService);
+    });
+
+    it('should be defined', () => {
+        expect(service).toBeDefined();
+    });
+
+    describe('getNetworks', () => {
+        it('should return cached networks if available and not skipped', async () => {
+            const cached = [{ id: 'net1', name: 'test-net' }];
+            mockCacheManager.get.mockResolvedValue(cached);
+
+            const result = await service.getNetworks({ skipCache: false });
+            expect(result).toEqual(cached);
+            expect(mockListNetworks).not.toHaveBeenCalled();
+        });
+
+        it('should fetch networks from docker if cache skipped', async () => {
+            const rawNetworks = [
+                {
+                    Id: 'net1',
+                    Name: 'test-net',
+                    Driver: 'bridge',
+                },
+            ];
+            mockListNetworks.mockResolvedValue(rawNetworks);
+
+            const result = await service.getNetworks({ skipCache: true });
+            expect(result).toHaveLength(1);
+            expect(result[0].id).toBe('net1');
+            expect(mockListNetworks).toHaveBeenCalled();
+            expect(mockCacheManager.set).toHaveBeenCalledWith(
+                DockerNetworkService.NETWORK_CACHE_KEY,
+                expect.anything(),
+                expect.anything()
+            );
+        });
+
+        it('should fetch networks from docker if cache miss', async () => {
+            mockCacheManager.get.mockResolvedValue(undefined);
+            mockListNetworks.mockResolvedValue([]);
+
+            await service.getNetworks({ skipCache: false });
+            expect(mockListNetworks).toHaveBeenCalled();
+        });
+    });
+});
@@ -0,0 +1,69 @@
import { CACHE_MANAGER } from '@nestjs/cache-manager';
import { Inject, Injectable, Logger } from '@nestjs/common';

import { type Cache } from 'cache-manager';

import { catchHandlers } from '@app/core/utils/misc/catch-handlers.js';
import { DockerNetwork } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { getDockerClient } from '@app/unraid-api/graph/resolvers/docker/utils/docker-client.js';

interface NetworkListingOptions {
    skipCache: boolean;
}

@Injectable()
export class DockerNetworkService {
    private readonly logger = new Logger(DockerNetworkService.name);
    private readonly client = getDockerClient();

    public static readonly NETWORK_CACHE_KEY = 'docker_networks';
    private static readonly CACHE_TTL_SECONDS = 60;

    constructor(@Inject(CACHE_MANAGER) private cacheManager: Cache) {}

    /**
     * Get all Docker networks
     * @returns All the in/active Docker networks on the system.
     */
    public async getNetworks({ skipCache }: NetworkListingOptions): Promise<DockerNetwork[]> {
        if (!skipCache) {
            const cachedNetworks = await this.cacheManager.get<DockerNetwork[]>(
                DockerNetworkService.NETWORK_CACHE_KEY
            );
            if (cachedNetworks) {
                this.logger.debug('Using docker network cache');
                return cachedNetworks;
            }
        }

        this.logger.debug('Updating docker network cache');
        const rawNetworks = await this.client.listNetworks().catch(catchHandlers.docker);
        const networks = rawNetworks.map(
            (network) =>
                ({
                    name: network.Name || '',
                    id: network.Id || '',
                    created: network.Created || '',
                    scope: network.Scope || '',
                    driver: network.Driver || '',
                    enableIPv6: network.EnableIPv6 || false,
                    ipam: network.IPAM || {},
                    internal: network.Internal || false,
                    attachable: network.Attachable || false,
                    ingress: network.Ingress || false,
                    configFrom: network.ConfigFrom || {},
                    configOnly: network.ConfigOnly || false,
                    containers: network.Containers || {},
                    options: network.Options || {},
                    labels: network.Labels || {},
                }) as DockerNetwork
        );

        await this.cacheManager.set(
            DockerNetworkService.NETWORK_CACHE_KEY,
            networks,
            DockerNetworkService.CACHE_TTL_SECONDS * 1000
        );
        return networks;
    }
}
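A hedged usage sketch for the service above, assuming it is resolved from a bootstrapped Nest application context in the usual way (the module wiring is not part of this diff):

// Sketch: fetch networks, bypassing the 60s cache on demand.
// `app` is an INestApplicationContext obtained elsewhere (assumption).
const networkService = app.get(DockerNetworkService);
const warm = await networkService.getNetworks({ skipCache: false }); // served from cache when populated
const fresh = await networkService.getNetworks({ skipCache: true }); // forces listNetworks() and repopulates the cache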
@@ -0,0 +1,84 @@
import { Test, TestingModule } from '@nestjs/testing';

import { beforeEach, describe, expect, it, vi } from 'vitest';

import { DockerPortService } from '@app/unraid-api/graph/resolvers/docker/docker-port.service.js';
import {
    ContainerPortType,
    DockerContainer,
} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';

vi.mock('@app/core/utils/network.js', () => ({
    getLanIp: vi.fn().mockReturnValue('192.168.1.100'),
}));

describe('DockerPortService', () => {
    let service: DockerPortService;

    beforeEach(async () => {
        const module: TestingModule = await Test.createTestingModule({
            providers: [DockerPortService],
        }).compile();

        service = module.get<DockerPortService>(DockerPortService);
    });

    it('should be defined', () => {
        expect(service).toBeDefined();
    });

    describe('deduplicateContainerPorts', () => {
        it('should deduplicate ports', () => {
            const ports = [
                { PrivatePort: 80, PublicPort: 80, Type: 'tcp' },
                { PrivatePort: 80, PublicPort: 80, Type: 'tcp' },
                { PrivatePort: 443, PublicPort: 443, Type: 'tcp' },
            ];
            // @ts-expect-error - types are loosely mocked
            const result = service.deduplicateContainerPorts(ports);
            expect(result).toHaveLength(2);
        });
    });

    describe('calculateConflicts', () => {
        it('should detect port conflicts', () => {
            const containers = [
                {
                    id: 'c1',
                    names: ['/web1'],
                    ports: [{ privatePort: 80, type: ContainerPortType.TCP }],
                },
                {
                    id: 'c2',
                    names: ['/web2'],
                    ports: [{ privatePort: 80, type: ContainerPortType.TCP }],
                },
            ] as DockerContainer[];

            const result = service.calculateConflicts(containers);
            expect(result.containerPorts).toHaveLength(1);
            expect(result.containerPorts[0].privatePort).toBe(80);
            expect(result.containerPorts[0].containers).toHaveLength(2);
        });

        it('should detect lan port conflicts', () => {
            const containers = [
                {
                    id: 'c1',
                    names: ['/web1'],
                    ports: [{ publicPort: 8080, type: ContainerPortType.TCP }],
                },
                {
                    id: 'c2',
                    names: ['/web2'],
                    ports: [{ publicPort: 8080, type: ContainerPortType.TCP }],
                },
            ] as DockerContainer[];

            const result = service.calculateConflicts(containers);
            expect(result.lanPorts).toHaveLength(1);
            expect(result.lanPorts[0].publicPort).toBe(8080);
            expect(result.lanPorts[0].containers).toHaveLength(2);
        });
    });
});
178 api/src/unraid-api/graph/resolvers/docker/docker-port.service.ts Normal file
@@ -0,0 +1,178 @@
import { Injectable } from '@nestjs/common';

import Docker from 'dockerode';

import { getLanIp } from '@app/core/utils/network.js';
import {
    ContainerPortType,
    DockerContainer,
    DockerContainerPortConflict,
    DockerLanPortConflict,
    DockerPortConflictContainer,
    DockerPortConflicts,
} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';

@Injectable()
export class DockerPortService {
    public deduplicateContainerPorts(
        ports: Docker.ContainerInfo['Ports'] | undefined
    ): Docker.ContainerInfo['Ports'] {
        if (!Array.isArray(ports)) {
            return [];
        }

        const seen = new Set<string>();
        const uniquePorts: Docker.ContainerInfo['Ports'] = [];

        for (const port of ports) {
            const key = `${port.PrivatePort ?? ''}-${port.PublicPort ?? ''}-${(port.Type ?? '').toLowerCase()}`;
            if (seen.has(key)) {
                continue;
            }
            seen.add(key);
            uniquePorts.push(port);
        }

        return uniquePorts;
    }

    public calculateConflicts(containers: DockerContainer[]): DockerPortConflicts {
        return {
            containerPorts: this.buildContainerPortConflicts(containers),
            lanPorts: this.buildLanPortConflicts(containers),
        };
    }

    private buildPortConflictContainerRef(container: DockerContainer): DockerPortConflictContainer {
        const primaryName = this.getContainerPrimaryName(container);
        const fallback = container.names?.[0] ?? container.id;
        const normalized = typeof fallback === 'string' ? fallback.replace(/^\//, '') : container.id;
        return {
            id: container.id,
            name: primaryName || normalized,
        };
    }

    private getContainerPrimaryName(container: DockerContainer): string | null {
        const names = container.names;
        const firstName = names?.[0] ?? '';
        return firstName ? firstName.replace(/^\//, '') : null;
    }

    private buildContainerPortConflicts(containers: DockerContainer[]): DockerContainerPortConflict[] {
        const groups = new Map<
            string,
            {
                privatePort: number;
                type: ContainerPortType;
                containers: DockerContainer[];
                seen: Set<string>;
            }
        >();

        for (const container of containers) {
            if (!Array.isArray(container.ports)) {
                continue;
            }
            for (const port of container.ports) {
                if (!port || typeof port.privatePort !== 'number') {
                    continue;
                }
                const type = port.type ?? ContainerPortType.TCP;
                const key = `${port.privatePort}/${type}`;
                let group = groups.get(key);
                if (!group) {
                    group = {
                        privatePort: port.privatePort,
                        type,
                        containers: [],
                        seen: new Set<string>(),
                    };
                    groups.set(key, group);
                }
                if (group.seen.has(container.id)) {
                    continue;
                }
                group.seen.add(container.id);
                group.containers.push(container);
            }
        }

        return Array.from(groups.values())
            .filter((group) => group.containers.length > 1)
            .map((group) => ({
                privatePort: group.privatePort,
                type: group.type,
                containers: group.containers.map((container) =>
                    this.buildPortConflictContainerRef(container)
                ),
            }))
            .sort((a, b) => {
                if (a.privatePort !== b.privatePort) {
                    return a.privatePort - b.privatePort;
                }
                return a.type.localeCompare(b.type);
            });
    }

    private buildLanPortConflicts(containers: DockerContainer[]): DockerLanPortConflict[] {
        const lanIp = getLanIp();
        const groups = new Map<
            string,
            {
                lanIpPort: string;
                publicPort: number;
                type: ContainerPortType;
                containers: DockerContainer[];
                seen: Set<string>;
            }
        >();

        for (const container of containers) {
            if (!Array.isArray(container.ports)) {
                continue;
            }
            for (const port of container.ports) {
                if (!port || typeof port.publicPort !== 'number') {
                    continue;
                }
                const type = port.type ?? ContainerPortType.TCP;
                const lanIpPort = lanIp ? `${lanIp}:${port.publicPort}` : `${port.publicPort}`;
                const key = `${lanIpPort}/${type}`;
                let group = groups.get(key);
                if (!group) {
                    group = {
                        lanIpPort,
                        publicPort: port.publicPort,
                        type,
                        containers: [],
                        seen: new Set<string>(),
                    };
                    groups.set(key, group);
                }
                if (group.seen.has(container.id)) {
                    continue;
                }
                group.seen.add(container.id);
                group.containers.push(container);
            }
        }

        return Array.from(groups.values())
            .filter((group) => group.containers.length > 1)
            .map((group) => ({
                lanIpPort: group.lanIpPort,
                publicPort: group.publicPort,
                type: group.type,
                containers: group.containers.map((container) =>
                    this.buildPortConflictContainerRef(container)
                ),
            }))
            .sort((a, b) => {
                if ((a.publicPort ?? 0) !== (b.publicPort ?? 0)) {
                    return (a.publicPort ?? 0) - (b.publicPort ?? 0);
                }
                return a.type.localeCompare(b.type);
            });
    }
}
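A short usage sketch for the conflict calculation above; `portService` is assumed to be an injected DockerPortService instance and `containers` a DockerContainer[] obtained elsewhere:

// Sketch: two containers exposing the same private port appear as one conflict group.
const conflicts = portService.calculateConflicts(containers);
for (const group of conflicts.containerPorts) {
    console.log(`${group.privatePort}/${group.type}:`, group.containers.map((c) => c.name).join(', '));
}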
@@ -0,0 +1,117 @@
import { Injectable, Logger, OnModuleDestroy } from '@nestjs/common';
import { createInterface } from 'readline';

import { execa } from 'execa';

import { pubsub, PUBSUB_CHANNEL } from '@app/core/pubsub.js';
import { catchHandlers } from '@app/core/utils/misc/catch-handlers.js';
import { DockerContainerStats } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';

@Injectable()
export class DockerStatsService implements OnModuleDestroy {
    private readonly logger = new Logger(DockerStatsService.name);
    private statsProcess: ReturnType<typeof execa> | null = null;
    private readonly STATS_FORMAT =
        '{{.ID}};{{.CPUPerc}};{{.MemUsage}};{{.MemPerc}};{{.NetIO}};{{.BlockIO}}';

    onModuleDestroy() {
        this.stopStatsStream();
    }

    public startStatsStream() {
        if (this.statsProcess) {
            return;
        }

        this.logger.log('Starting docker stats stream');

        try {
            this.statsProcess = execa('docker', ['stats', '--format', this.STATS_FORMAT, '--no-trunc'], {
                all: true,
                reject: false, // Don't throw on exit code != 0, handle via parsing/events
            });

            if (this.statsProcess.stdout) {
                const rl = createInterface({
                    input: this.statsProcess.stdout,
                    crlfDelay: Infinity,
                });

                rl.on('line', (line) => {
                    if (!line.trim()) return;
                    this.processStatsLine(line);
                });

                rl.on('error', (err) => {
                    this.logger.error('Error reading docker stats stream', err);
                });
            }

            if (this.statsProcess.stderr) {
                this.statsProcess.stderr.on('data', (data: Buffer) => {
                    // Log docker stats errors but don't crash
                    this.logger.debug(`Docker stats stderr: ${data.toString()}`);
                });
            }

            // Handle process exit
            this.statsProcess
                .then((result) => {
                    if (result.failed && !result.signal) {
                        this.logger.error('Docker stats process exited with error', result.shortMessage);
                        this.stopStatsStream();
                    }
                })
                .catch((err) => {
                    if (!err.killed) {
                        this.logger.error('Docker stats process ended unexpectedly', err);
                        this.stopStatsStream();
                    }
                });
        } catch (error) {
            this.logger.error('Failed to start docker stats', error);
            catchHandlers.docker(error as Error);
        }
    }

    public stopStatsStream() {
        if (this.statsProcess) {
            this.logger.log('Stopping docker stats stream');
            this.statsProcess.kill();
            this.statsProcess = null;
        }
    }

    private processStatsLine(line: string) {
        try {
            // format: ID;CPUPerc;MemUsage;MemPerc;NetIO;BlockIO
            // Example: 123abcde;0.00%;10MiB / 100MiB;10.00%;1kB / 2kB;0B / 0B

            // Remove ANSI escape codes if any (docker stats sometimes includes them)
            // eslint-disable-next-line no-control-regex
            const cleanLine = line.replace(/\x1B\[[0-9;]*[mK]/g, '');

            const parts = cleanLine.split(';');
            if (parts.length < 6) return;

            const [id, cpuPercStr, memUsage, memPercStr, netIO, blockIO] = parts;

            const stats: DockerContainerStats = {
                id,
                cpuPercent: this.parsePercentage(cpuPercStr),
                memUsage,
                memPercent: this.parsePercentage(memPercStr),
                netIO,
                blockIO,
            };

            pubsub.publish(PUBSUB_CHANNEL.DOCKER_STATS, { dockerContainerStats: stats });
        } catch (error) {
            this.logger.debug(`Failed to process stats line: ${line}`, error);
        }
    }

    private parsePercentage(value: string): number {
        return parseFloat(value.replace('%', '')) || 0;
    }
}
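To make the line format above concrete, here is a small sketch of parsing one semicolon-delimited stats line the way processStatsLine does; the sample values are illustrative, not captured output:

// Sketch: one line of `docker stats --format '{{.ID}};{{.CPUPerc}};...'` output.
const sampleLine = 'abc123;1.50%;10MiB / 100MiB;10.00%;1kB / 2kB;0B / 0B';
const [id, cpu, memUsage, memPerc, netIO, blockIO] = sampleLine.split(';');
const cpuPercent = parseFloat(cpu.replace('%', '')) || 0; // 1.5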
@@ -0,0 +1,357 @@
import { CACHE_MANAGER } from '@nestjs/cache-manager';
import { Inject, Injectable, Logger } from '@nestjs/common';

import { type Cache } from 'cache-manager';

import { TailscaleStatus } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { getDockerClient } from '@app/unraid-api/graph/resolvers/docker/utils/docker-client.js';

interface RawTailscaleStatus {
    Self: {
        Online: boolean;
        DNSName: string;
        TailscaleIPs?: string[];
        Relay?: string;
        PrimaryRoutes?: string[];
        ExitNodeOption?: boolean;
        KeyExpiry?: string;
    };
    ExitNodeStatus?: {
        Online: boolean;
        TailscaleIPs?: string[];
    };
    Version: string;
    BackendState?: string;
    AuthURL?: string;
}

interface DerpRegion {
    RegionCode: string;
    RegionName: string;
}

interface DerpMap {
    Regions: Record<string, DerpRegion>;
}

interface TailscaleVersionResponse {
    TarballsVersion: string;
}

@Injectable()
export class DockerTailscaleService {
    private readonly logger = new Logger(DockerTailscaleService.name);
    private readonly docker = getDockerClient();

    private static readonly DERP_MAP_CACHE_KEY = 'tailscale_derp_map';
    private static readonly VERSION_CACHE_KEY = 'tailscale_latest_version';
    private static readonly STATUS_CACHE_PREFIX = 'tailscale_status_';
    private static readonly DERP_MAP_TTL = 86400000; // 24 hours in ms
    private static readonly VERSION_TTL = 86400000; // 24 hours in ms
    private static readonly STATUS_TTL = 30000; // 30 seconds in ms

    constructor(@Inject(CACHE_MANAGER) private cacheManager: Cache) {}

    async getTailscaleStatus(
        containerName: string,
        labels: Record<string, string>,
        forceRefresh = false
    ): Promise<TailscaleStatus | null> {
        const hostname = labels['net.unraid.docker.tailscale.hostname'];
        const webUiTemplate = labels['net.unraid.docker.tailscale.webui'];

        const cacheKey = `${DockerTailscaleService.STATUS_CACHE_PREFIX}${containerName}`;

        if (forceRefresh) {
            await this.cacheManager.del(cacheKey);
        } else {
            const cached = await this.cacheManager.get<TailscaleStatus>(cacheKey);
            if (cached) {
                return cached;
            }
        }

        const rawStatus = await this.execTailscaleStatus(containerName);
        if (!rawStatus) {
            // Don't cache failures - return without caching so next request retries
            return {
                online: false,
                hostname: hostname || undefined,
                isExitNode: false,
                updateAvailable: false,
                keyExpired: false,
            };
        }

        const [derpMap, latestVersion] = await Promise.all([this.getDerpMap(), this.getLatestVersion()]);

        const version = rawStatus.Version?.split('-')[0];
        const updateAvailable = Boolean(
            version && latestVersion && this.isVersionLessThan(version, latestVersion)
        );

        const dnsName = rawStatus.Self.DNSName;

        let relayName: string | undefined;
        if (rawStatus.Self.Relay && derpMap) {
            relayName = this.mapRelayToRegion(rawStatus.Self.Relay, derpMap);
        }

        let keyExpiry: Date | undefined;
        let keyExpiryDays: number | undefined;
        let keyExpired = false;

        if (rawStatus.Self.KeyExpiry) {
            keyExpiry = new Date(rawStatus.Self.KeyExpiry);
            const now = new Date();
            const diffMs = keyExpiry.getTime() - now.getTime();
            keyExpiryDays = Math.floor(diffMs / (1000 * 60 * 60 * 24));
            keyExpired = diffMs < 0;
        }

        const webUiUrl = webUiTemplate ? this.resolveWebUiUrl(webUiTemplate, rawStatus) : undefined;

        const status: TailscaleStatus = {
            online: rawStatus.Self.Online,
            version,
            latestVersion: latestVersion ?? undefined,
            updateAvailable,
            hostname,
            dnsName: dnsName || undefined,
            relay: rawStatus.Self.Relay,
            relayName,
            tailscaleIps: rawStatus.Self.TailscaleIPs,
            primaryRoutes: rawStatus.Self.PrimaryRoutes,
            isExitNode: Boolean(rawStatus.Self.ExitNodeOption),
            exitNodeStatus: rawStatus.ExitNodeStatus
                ? {
                      online: rawStatus.ExitNodeStatus.Online,
                      tailscaleIps: rawStatus.ExitNodeStatus.TailscaleIPs,
                  }
                : undefined,
            webUiUrl,
            keyExpiry,
            keyExpiryDays,
            keyExpired,
            backendState: rawStatus.BackendState,
            authUrl: rawStatus.AuthURL,
        };

        await this.cacheManager.set(cacheKey, status, DockerTailscaleService.STATUS_TTL);

        return status;
    }

    async getDerpMap(): Promise<DerpMap | null> {
        const cached = await this.cacheManager.get<DerpMap>(DockerTailscaleService.DERP_MAP_CACHE_KEY);
        if (cached) {
            return cached;
        }

        try {
            const response = await fetch('https://login.tailscale.com/derpmap/default', {
                signal: AbortSignal.timeout(3000),
            });

            if (!response.ok) {
                this.logger.warn(`Failed to fetch DERP map: ${response.status}`);
                return null;
            }

            const data = (await response.json()) as DerpMap;
            await this.cacheManager.set(
                DockerTailscaleService.DERP_MAP_CACHE_KEY,
                data,
                DockerTailscaleService.DERP_MAP_TTL
            );
            return data;
        } catch (error) {
            this.logger.warn('Failed to fetch DERP map', error);
            return null;
        }
    }

    async getLatestVersion(): Promise<string | null> {
        const cached = await this.cacheManager.get<string>(DockerTailscaleService.VERSION_CACHE_KEY);
        if (cached) {
            return cached;
        }

        try {
            const response = await fetch('https://pkgs.tailscale.com/stable/?mode=json', {
                signal: AbortSignal.timeout(3000),
            });

            if (!response.ok) {
                this.logger.warn(`Failed to fetch Tailscale version: ${response.status}`);
                return null;
            }

            const data = (await response.json()) as TailscaleVersionResponse;
            const version = data.TarballsVersion;
            await this.cacheManager.set(
                DockerTailscaleService.VERSION_CACHE_KEY,
                version,
                DockerTailscaleService.VERSION_TTL
            );
            return version;
        } catch (error) {
            this.logger.warn('Failed to fetch Tailscale version', error);
            return null;
        }
    }

    private async execTailscaleStatus(containerName: string): Promise<RawTailscaleStatus | null> {
        try {
            const cleanName = containerName.replace(/^\//, '');
            const container = this.docker.getContainer(cleanName);

            const exec = await container.exec({
                Cmd: ['/bin/sh', '-c', 'tailscale status --json'],
                AttachStdout: true,
                AttachStderr: true,
            });

            const stream = await exec.start({ hijack: true, stdin: false });
            const output = await this.collectStreamOutput(stream);

            this.logger.debug(`Raw tailscale output for ${cleanName}: ${output.substring(0, 500)}...`);

            if (!output.trim()) {
                this.logger.warn(`Empty tailscale output for ${cleanName}`);
                return null;
            }

            const parsed = JSON.parse(output) as RawTailscaleStatus;
            this.logger.debug(
                `Parsed tailscale status for ${cleanName}: DNSName=${parsed.Self?.DNSName}, Online=${parsed.Self?.Online}`
            );
            return parsed;
        } catch (error) {
            this.logger.debug(`Failed to get Tailscale status for ${containerName}: ${error}`);
            return null;
        }
    }

    private async collectStreamOutput(stream: NodeJS.ReadableStream): Promise<string> {
        return new Promise((resolve, reject) => {
            const chunks: Buffer[] = [];
            stream.on('data', (chunk: Buffer) => {
                chunks.push(chunk);
            });
            stream.on('end', () => {
                const buffer = Buffer.concat(chunks);
                const output = this.demuxDockerStream(buffer);
                resolve(output);
            });
            stream.on('error', reject);
        });
    }

    private demuxDockerStream(buffer: Buffer): string {
        // Check if the buffer looks like it starts with JSON (not multiplexed)
        // Docker multiplexed streams start with stream type byte (0, 1, or 2)
        // followed by 3 zero bytes, then 4-byte size
        if (buffer.length > 0) {
            const firstChar = buffer.toString('utf8', 0, 1);
            if (firstChar === '{' || firstChar === '[') {
                // Already plain text/JSON, not multiplexed
                return buffer.toString('utf8');
            }
        }

        let offset = 0;
        const output: string[] = [];

        while (offset < buffer.length) {
            if (offset + 8 > buffer.length) break;

            const streamType = buffer.readUInt8(offset);
            // Valid stream types are 0 (stdin), 1 (stdout), 2 (stderr)
            if (streamType > 2) {
                // Doesn't look like multiplexed stream, treat as raw
                return buffer.toString('utf8');
            }

            const size = buffer.readUInt32BE(offset + 4);
            offset += 8;

            if (offset + size > buffer.length) break;

            const chunk = buffer.slice(offset, offset + size).toString('utf8');
            output.push(chunk);
            offset += size;
        }

        return output.join('');
    }

    private mapRelayToRegion(relayCode: string, derpMap: DerpMap): string | undefined {
        for (const region of Object.values(derpMap.Regions)) {
            if (region.RegionCode === relayCode) {
                return region.RegionName;
            }
        }
        return undefined;
    }

    private isVersionLessThan(current: string, latest: string): boolean {
        const currentParts = current.split('.').map(Number);
        const latestParts = latest.split('.').map(Number);

        for (let i = 0; i < Math.max(currentParts.length, latestParts.length); i++) {
            const curr = currentParts[i] || 0;
            const lat = latestParts[i] || 0;
            if (curr < lat) return true;
            if (curr > lat) return false;
        }
        return false;
    }

    private resolveWebUiUrl(template: string, status: RawTailscaleStatus): string | undefined {
        if (!template) return undefined;

        let url = template;
        const dnsName = status.Self.DNSName?.replace(/\.$/, '');

        // Handle [hostname][magicdns] or [hostname] - use MagicDNS name and port 443
        if (url.includes('[hostname]')) {
            if (dnsName) {
                // Replace [hostname][magicdns] with the full DNS name
                url = url.replace('[hostname][magicdns]', dnsName);
                // Replace standalone [hostname] with the DNS name
                url = url.replace('[hostname]', dnsName);
                // When using MagicDNS, also replace [IP] with DNS name
                url = url.replace(/\[IP\]/g, dnsName);
                // When using MagicDNS with Serve/Funnel, port is always 443
                url = url.replace(/\[PORT:\d+\]/g, '443');
            } else {
                // DNS name not available, can't resolve
                return undefined;
            }
        } else if (url.includes('[noserve]')) {
            // Handle [noserve] - use direct Tailscale IP
            const ipv4 = status.Self.TailscaleIPs?.find((ip) => !ip.includes(':'));
            if (ipv4) {
                const portMatch = template.match(/\[PORT:(\d+)\]/);
                const port = portMatch ? `:${portMatch[1]}` : '';
                url = `http://${ipv4}${port}`;
            } else {
                return undefined;
            }
        } else {
            // Custom URL - just do basic replacements
            if (url.includes('[IP]') && status.Self.TailscaleIPs?.[0]) {
                const ipv4 = status.Self.TailscaleIPs.find((ip) => !ip.includes(':'));
                url = url.replace(/\[IP\]/g, ipv4 || status.Self.TailscaleIPs[0]);
            }

            const portMatch = url.match(/\[PORT:(\d+)\]/);
            if (portMatch) {
                url = url.replace(portMatch[0], portMatch[1]);
            }
        }

        return url;
    }
}
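The demuxDockerStream parser above expects exec output framed per Docker's attach protocol: a 1-byte stream type, three zero bytes, then a 4-byte big-endian payload length, followed by the payload. A minimal sketch constructing one stdout frame, which could be used to exercise the parser in a test (the payload value is illustrative):

// Sketch: build a single multiplexed stdout frame as dockerode's exec stream emits it.
const payload = Buffer.from('{"Self":{"Online":true}}', 'utf8');
const header = Buffer.alloc(8); // bytes 1-3 stay zero, as the protocol requires
header.writeUInt8(1, 0); // stream type 1 = stdout
header.writeUInt32BE(payload.length, 4); // payload size in bytes 4..7
const frame = Buffer.concat([header, payload]); // what collectStreamOutput would accumulate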
@@ -0,0 +1,61 @@
import { Injectable, Logger } from '@nestjs/common';
import { readFile } from 'fs/promises';

import { XMLParser } from 'fast-xml-parser';

@Injectable()
export class DockerTemplateIconService {
    private readonly logger = new Logger(DockerTemplateIconService.name);
    private readonly xmlParser = new XMLParser({
        ignoreAttributes: false,
        parseAttributeValue: true,
        trimValues: true,
    });

    async getIconFromTemplate(templatePath: string): Promise<string | null> {
        try {
            const content = await readFile(templatePath, 'utf-8');
            const parsed = this.xmlParser.parse(content);

            if (!parsed.Container) {
                return null;
            }

            return parsed.Container.Icon || null;
        } catch (error) {
            this.logger.debug(
                `Failed to read icon from template ${templatePath}: ${error instanceof Error ? error.message : 'Unknown error'}`
            );
            return null;
        }
    }

    async getIconsForContainers(
        containers: Array<{ id: string; templatePath?: string }>
    ): Promise<Map<string, string>> {
        const iconMap = new Map<string, string>();

        const iconPromises = containers.map(async (container) => {
            if (!container.templatePath) {
                return null;
            }

            const icon = await this.getIconFromTemplate(container.templatePath);
            if (icon) {
                return { id: container.id, icon };
            }
            return null;
        });

        const results = await Promise.all(iconPromises);

        for (const result of results) {
            if (result) {
                iconMap.set(result.id, result.icon);
            }
        }

        this.logger.debug(`Loaded ${iconMap.size} icons from ${containers.length} containers`);
        return iconMap;
    }
}
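A brief usage sketch for the batch icon lookup above; the container id and template path are illustrative placeholders, and `iconService` is assumed to be an injected DockerTemplateIconService:

// Sketch: resolve icons for containers with known template paths.
const icons = await iconService.getIconsForContainers([
    { id: 'abc123', templatePath: '/boot/config/plugins/dockerMan/templates-user/my-app.xml' }, // hypothetical path
]);
console.log(icons.get('abc123')); // the template's <Icon> value, if present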
@@ -0,0 +1,16 @@
import { Field, Int, ObjectType } from '@nestjs/graphql';

@ObjectType()
export class DockerTemplateSyncResult {
    @Field(() => Int)
    scanned!: number;

    @Field(() => Int)
    matched!: number;

    @Field(() => Int)
    skipped!: number;

    @Field(() => [String])
    errors!: string[];
}
@@ -0,0 +1,425 @@
import { Test, TestingModule } from '@nestjs/testing';
import { mkdir, rm, writeFile } from 'fs/promises';
import { join } from 'path';

import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerTemplateScannerService } from '@app/unraid-api/graph/resolvers/docker/docker-template-scanner.service.js';
import { DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';

vi.mock('@app/environment.js', () => ({
    PATHS_DOCKER_TEMPLATES: ['/tmp/test-templates'],
    ENABLE_NEXT_DOCKER_RELEASE: true,
}));

describe('DockerTemplateScannerService', () => {
    let service: DockerTemplateScannerService;
    let dockerConfigService: DockerConfigService;
    let dockerService: DockerService;
    const testTemplateDir = '/tmp/test-templates';

    beforeEach(async () => {
        await mkdir(testTemplateDir, { recursive: true });

        const mockDockerService = {
            getContainers: vi.fn(),
        };

        const mockDockerConfigService = {
            getConfig: vi.fn(),
            replaceConfig: vi.fn(),
            validate: vi.fn((config) => Promise.resolve(config)),
        };

        const module: TestingModule = await Test.createTestingModule({
            providers: [
                DockerTemplateScannerService,
                {
                    provide: DockerConfigService,
                    useValue: mockDockerConfigService,
                },
                {
                    provide: DockerService,
                    useValue: mockDockerService,
                },
            ],
        }).compile();

        service = module.get<DockerTemplateScannerService>(DockerTemplateScannerService);
        dockerConfigService = module.get<DockerConfigService>(DockerConfigService);
        dockerService = module.get<DockerService>(DockerService);
    });

    afterEach(async () => {
        await rm(testTemplateDir, { recursive: true, force: true });
    });

    describe('parseTemplate', () => {
        it('should parse valid XML template', async () => {
            const templatePath = join(testTemplateDir, 'test.xml');
            const templateContent = `<?xml version="1.0"?>
<Container version="2">
    <Name>test-container</Name>
    <Repository>test/image</Repository>
</Container>`;
            await writeFile(templatePath, templateContent);

            const result = await (service as any).parseTemplate(templatePath);

            expect(result).toEqual({
                filePath: templatePath,
                name: 'test-container',
                repository: 'test/image',
            });
        });

        it('should handle invalid XML gracefully by returning null', async () => {
            const templatePath = join(testTemplateDir, 'invalid.xml');
            await writeFile(templatePath, 'not xml');

            const result = await (service as any).parseTemplate(templatePath);
            expect(result).toBeNull();
        });

        it('should return null for XML without Container element', async () => {
            const templatePath = join(testTemplateDir, 'no-container.xml');
            const templateContent = `<?xml version="1.0"?><Root></Root>`;
            await writeFile(templatePath, templateContent);

            const result = await (service as any).parseTemplate(templatePath);

            expect(result).toBeNull();
        });
    });

    describe('matchContainerToTemplate', () => {
        it('should match by container name (exact match)', () => {
            const container: DockerContainer = {
                id: 'abc123',
                names: ['/test-container'],
                image: 'different/image:latest',
            } as DockerContainer;

            const templates = [
                { filePath: '/path/1', name: 'test-container', repository: 'some/repo' },
                { filePath: '/path/2', name: 'other', repository: 'other/repo' },
            ];

            const result = (service as any).matchContainerToTemplate(container, templates);

            expect(result).toEqual(templates[0]);
        });

        it('should match by repository when name does not match', () => {
            const container: DockerContainer = {
                id: 'abc123',
                names: ['/my-container'],
                image: 'test/image:v1.0',
            } as DockerContainer;

            const templates = [
                { filePath: '/path/1', name: 'different', repository: 'other/repo' },
                { filePath: '/path/2', name: 'also-different', repository: 'test/image' },
            ];

            const result = (service as any).matchContainerToTemplate(container, templates);

            expect(result).toEqual(templates[1]);
        });

        it('should strip tags when matching repository', () => {
            const container: DockerContainer = {
                id: 'abc123',
                names: ['/my-container'],
                image: 'test/image:latest',
            } as DockerContainer;

            const templates = [
                { filePath: '/path/1', name: 'different', repository: 'test/image:v1.0' },
            ];

            const result = (service as any).matchContainerToTemplate(container, templates);

            expect(result).toEqual(templates[0]);
        });

        it('should return null when no match found', () => {
            const container: DockerContainer = {
                id: 'abc123',
                names: ['/my-container'],
                image: 'test/image:latest',
            } as DockerContainer;

            const templates = [{ filePath: '/path/1', name: 'different', repository: 'other/image' }];

            const result = (service as any).matchContainerToTemplate(container, templates);

            expect(result).toBeNull();
        });

        it('should be case-insensitive', () => {
            const container: DockerContainer = {
                id: 'abc123',
                names: ['/Test-Container'],
                image: 'Test/Image:latest',
            } as DockerContainer;

            const templates = [
                { filePath: '/path/1', name: 'test-container', repository: 'test/image' },
            ];

            const result = (service as any).matchContainerToTemplate(container, templates);

            expect(result).toEqual(templates[0]);
        });
    });

    describe('scanTemplates', () => {
        it('should scan templates and create mappings', async () => {
            const template1 = join(testTemplateDir, 'redis.xml');
            await writeFile(
                template1,
                `<?xml version="1.0"?>
<Container version="2">
    <Name>redis</Name>
    <Repository>redis</Repository>
</Container>`
            );

            const containers: DockerContainer[] = [
                {
                    id: 'container1',
                    names: ['/redis'],
                    image: 'redis:latest',
                } as DockerContainer,
            ];

            vi.mocked(dockerService.getContainers).mockResolvedValue(containers);
            vi.mocked(dockerConfigService.getConfig).mockReturnValue({
                updateCheckCronSchedule: '0 6 * * *',
                templateMappings: {},
                skipTemplatePaths: [],
            });

            const result = await service.scanTemplates();

            expect(result.scanned).toBe(1);
            expect(result.matched).toBe(1);
            expect(result.errors).toHaveLength(0);
            expect(dockerConfigService.replaceConfig).toHaveBeenCalledWith(
                expect.objectContaining({
                    templateMappings: {
                        redis: template1,
                    },
                })
            );
        });

        it('should skip containers in skipTemplatePaths', async () => {
            const template1 = join(testTemplateDir, 'redis.xml');
            await writeFile(
                template1,
                `<?xml version="1.0"?>
<Container version="2">
    <Name>redis</Name>
    <Repository>redis</Repository>
</Container>`
            );

            const containers: DockerContainer[] = [
                {
                    id: 'container1',
                    names: ['/redis'],
                    image: 'redis:latest',
                } as DockerContainer,
            ];

            vi.mocked(dockerService.getContainers).mockResolvedValue(containers);
            vi.mocked(dockerConfigService.getConfig).mockReturnValue({
                updateCheckCronSchedule: '0 6 * * *',
                templateMappings: {},
                skipTemplatePaths: ['redis'],
            });

            const result = await service.scanTemplates();

            expect(result.skipped).toBe(1);
            expect(result.matched).toBe(0);
        });

        it('should handle missing template directory gracefully', async () => {
            await rm(testTemplateDir, { recursive: true, force: true });

            const containers: DockerContainer[] = [];

            vi.mocked(dockerService.getContainers).mockResolvedValue(containers);
            vi.mocked(dockerConfigService.getConfig).mockReturnValue({
                updateCheckCronSchedule: '0 6 * * *',
                templateMappings: {},
                skipTemplatePaths: [],
            });

            const result = await service.scanTemplates();

            expect(result.scanned).toBe(0);
            expect(result.errors.length).toBeGreaterThan(0);
        });

        it('should handle docker service errors gracefully', async () => {
            vi.mocked(dockerService.getContainers).mockRejectedValue(new Error('Docker error'));
            vi.mocked(dockerConfigService.getConfig).mockReturnValue({
                updateCheckCronSchedule: '0 6 * * *',
                templateMappings: {},
                skipTemplatePaths: [],
            });

            const result = await service.scanTemplates();

            expect(result.errors.length).toBeGreaterThan(0);
            expect(result.errors[0]).toContain('Failed to get containers');
        });

        it('should set null mapping for unmatched containers', async () => {
            const containers: DockerContainer[] = [
                {
                    id: 'container1',
                    names: ['/unknown'],
                    image: 'unknown:latest',
                } as DockerContainer,
            ];

            vi.mocked(dockerService.getContainers).mockResolvedValue(containers);
            vi.mocked(dockerConfigService.getConfig).mockReturnValue({
                updateCheckCronSchedule: '0 6 * * *',
                templateMappings: {},
                skipTemplatePaths: [],
            });

            await service.scanTemplates();

            expect(dockerConfigService.replaceConfig).toHaveBeenCalledWith(
                expect.objectContaining({
                    templateMappings: {
                        unknown: null,
                    },
                })
            );
        });
    });

    describe('syncMissingContainers', () => {
        it('should return true and trigger scan when containers are missing mappings', async () => {
            const containers: DockerContainer[] = [
                {
                    id: 'container1',
                    names: ['/redis'],
                    image: 'redis:latest',
                } as DockerContainer,
            ];

            vi.mocked(dockerConfigService.getConfig).mockReturnValue({
                updateCheckCronSchedule: '0 6 * * *',
                templateMappings: {},
                skipTemplatePaths: [],
            });

            vi.mocked(dockerService.getContainers).mockResolvedValue(containers);

            const scanSpy = vi.spyOn(service, 'scanTemplates').mockResolvedValue({
                scanned: 0,
                matched: 0,
                skipped: 0,
                errors: [],
            });

            const result = await service.syncMissingContainers(containers);

            expect(result).toBe(true);
            expect(scanSpy).toHaveBeenCalled();
        });

        it('should return false when all containers have mappings', async () => {
            const containers: DockerContainer[] = [
                {
                    id: 'container1',
                    names: ['/redis'],
                    image: 'redis:latest',
                } as DockerContainer,
            ];

            vi.mocked(dockerConfigService.getConfig).mockReturnValue({
                updateCheckCronSchedule: '0 6 * * *',
                templateMappings: {
                    redis: '/path/to/template.xml',
                },
                skipTemplatePaths: [],
            });

            const scanSpy = vi.spyOn(service, 'scanTemplates');

            const result = await service.syncMissingContainers(containers);

            expect(result).toBe(false);
            expect(scanSpy).not.toHaveBeenCalled();
        });

        it('should not trigger scan for containers in skip list', async () => {
            const containers: DockerContainer[] = [
                {
                    id: 'container1',
                    names: ['/redis'],
                    image: 'redis:latest',
                } as DockerContainer,
            ];

            vi.mocked(dockerConfigService.getConfig).mockReturnValue({
                updateCheckCronSchedule: '0 6 * * *',
                templateMappings: {},
                skipTemplatePaths: ['redis'],
            });

            const scanSpy = vi.spyOn(service, 'scanTemplates');

            const result = await service.syncMissingContainers(containers);

            expect(result).toBe(false);
            expect(scanSpy).not.toHaveBeenCalled();
        });
    });

    describe('normalizeContainerName', () => {
        it('should remove leading slash', () => {
            const result = (service as any).normalizeContainerName('/container-name');
            expect(result).toBe('container-name');
        });

        it('should convert to lowercase', () => {
            const result = (service as any).normalizeContainerName('/Container-Name');
            expect(result).toBe('container-name');
        });
    });

    describe('normalizeRepository', () => {
        it('should strip tag', () => {
            const result = (service as any).normalizeRepository('redis:latest');
            expect(result).toBe('redis');
        });

        it('should strip version tag', () => {
            const result = (service as any).normalizeRepository('postgres:14.5');
            expect(result).toBe('postgres');
        });

        it('should convert to lowercase', () => {
            const result = (service as any).normalizeRepository('Redis:Latest');
            expect(result).toBe('redis');
        });

        it('should handle repository without tag', () => {
            const result = (service as any).normalizeRepository('nginx');
            expect(result).toBe('nginx');
        });
    });
});
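The tests above pin down the normalization behavior the scanner relies on for matching. A plausible reading of those private helpers, reconstructed from the test expectations only (the actual implementations are not visible in this diff):

// Sketch inferred from the spec: names lose the leading slash, repositories lose the tag, both lowercase.
const normalizeContainerName = (name: string) => name.replace(/^\//, '').toLowerCase();
const normalizeRepository = (repo: string) => repo.split(':')[0].toLowerCase();
normalizeContainerName('/Test-Container'); // 'test-container'
normalizeRepository('postgres:14.5'); // 'postgres'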
@@ -0,0 +1,293 @@
import { Injectable, Logger } from '@nestjs/common';
import { Timeout } from '@nestjs/schedule';
import { readdir, readFile } from 'fs/promises';
import { join } from 'path';

import { XMLParser } from 'fast-xml-parser';

import { ENABLE_NEXT_DOCKER_RELEASE, PATHS_DOCKER_TEMPLATES } from '@app/environment.js';
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerTemplateSyncResult } from '@app/unraid-api/graph/resolvers/docker/docker-template-scanner.model.js';
import { DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';

interface ParsedTemplate {
    filePath: string;
    name?: string;
    repository?: string;
}

@Injectable()
export class DockerTemplateScannerService {
    private readonly logger = new Logger(DockerTemplateScannerService.name);
    private readonly xmlParser = new XMLParser({
        ignoreAttributes: false,
        parseAttributeValue: true,
        trimValues: true,
    });

    constructor(
        private readonly dockerConfigService: DockerConfigService,
        private readonly dockerService: DockerService
    ) {}

    @Timeout(5_000)
    async bootstrapScan(attempt = 1, maxAttempts = 5): Promise<void> {
        if (!ENABLE_NEXT_DOCKER_RELEASE) {
            return;
        }
        try {
            this.logger.log(`Starting template scan (attempt ${attempt}/${maxAttempts})`);
            const result = await this.scanTemplates();
            this.logger.log(
                `Template scan complete: ${result.matched} matched, ${result.scanned} scanned, ${result.skipped} skipped`
            );
        } catch (error) {
            if (attempt < maxAttempts) {
                this.logger.warn(
                    `Template scan failed (attempt ${attempt}/${maxAttempts}), retrying in 60s: ${error instanceof Error ? error.message : 'Unknown error'}`
                );
                setTimeout(() => this.bootstrapScan(attempt + 1, maxAttempts), 60_000);
            } else {
                this.logger.error(
                    `Template scan failed after ${maxAttempts} attempts: ${error instanceof Error ? error.message : 'Unknown error'}`
                );
            }
        }
    }

    async syncMissingContainers(containers: DockerContainer[]): Promise<boolean> {
        const config = this.dockerConfigService.getConfig();
        const mappings = config.templateMappings || {};
        const skipSet = new Set(config.skipTemplatePaths || []);

        const needsSync = containers.filter((c) => {
            const containerName = this.normalizeContainerName(c.names[0]);
            return !mappings[containerName] && !skipSet.has(containerName);
        });

        if (needsSync.length > 0) {
            this.logger.log(
                `Found ${needsSync.length} containers without template mappings, triggering sync`
            );
            await this.scanTemplates();
            return true;
        }
        return false;
    }

    async scanTemplates(): Promise<DockerTemplateSyncResult> {
        const result: DockerTemplateSyncResult = {
            scanned: 0,
            matched: 0,
            skipped: 0,
            errors: [],
        };

        const templates = await this.loadAllTemplates(result);

        try {
            const containers = await this.dockerService.getContainers({ skipCache: true });
            const config = this.dockerConfigService.getConfig();
            const currentMappings = config.templateMappings || {};
            const skipSet = new Set(config.skipTemplatePaths || []);

            const newMappings: Record<string, string | null> = { ...currentMappings };

            for (const container of containers) {
                const containerName = this.normalizeContainerName(container.names[0]);
                if (skipSet.has(containerName)) {
                    result.skipped++;
                    continue;
                }

                const match = this.matchContainerToTemplate(container, templates);
                if (match) {
                    newMappings[containerName] = match.filePath;
                    result.matched++;
                } else {
                    newMappings[containerName] = null;
                }
            }

            await this.updateMappings(newMappings);
        } catch (error) {
            const errorMsg = `Failed to get containers: ${error instanceof Error ? error.message : 'Unknown error'}`;
            this.logger.error(error, 'Failed to get containers');
            result.errors.push(errorMsg);
        }

        return result;
    }

    async getTemplateDetails(filePath: string): Promise<{
        project?: string;
        registry?: string;
        support?: string;
        overview?: string;
        icon?: string;
        webUi?: string;
        shell?: string;
        ports?: Array<{ privatePort: number; publicPort: number; type: 'tcp' | 'udp' }>;
    } | null> {
        try {
            const content = await readFile(filePath, 'utf-8');
            const parsed = this.xmlParser.parse(content);

            if (!parsed.Container) {
                return null;
            }

            const container = parsed.Container;
            const ports = this.extractTemplatePorts(container);

            return {
                project: container.Project,
                registry: container.Registry,
                support: container.Support,
                overview: container.ReadMe || container.Overview,
                icon: container.Icon,
                webUi: container.WebUI,
                shell: container.Shell,
                ports,
            };
        } catch (error) {
            this.logger.warn(
                `Failed to parse template ${filePath}: ${error instanceof Error ? error.message : 'Unknown error'}`
            );
            return null;
        }
    }

    private extractTemplatePorts(
        container: Record<string, unknown>
    ): Array<{ privatePort: number; publicPort: number; type: 'tcp' | 'udp' }> {
        const ports: Array<{ privatePort: number; publicPort: number; type: 'tcp' | 'udp' }> = [];

        const configs = container.Config;
        if (!configs) {
            return ports;
        }

        const configArray = Array.isArray(configs) ? configs : [configs];

        for (const config of configArray) {
            if (!config || typeof config !== 'object') continue;

            const attrs = config['@_Type'];
            if (attrs !== 'Port') continue;

            const target = config['@_Target'];
            const mode = config['@_Mode'];
            const value = config['#text'];

            if (target === undefined || value === undefined) continue;

            const privatePort = parseInt(String(target), 10);
            const publicPort = parseInt(String(value), 10);

            if (isNaN(privatePort) || isNaN(publicPort)) continue;

            const type = String(mode).toLowerCase() === 'udp' ? 'udp' : 'tcp';
            ports.push({ privatePort, publicPort, type });
        }

        return ports;
    }

    private async loadAllTemplates(result: DockerTemplateSyncResult): Promise<ParsedTemplate[]> {
        const allTemplates: ParsedTemplate[] = [];

        for (const directory of PATHS_DOCKER_TEMPLATES) {
            try {
                const files = await readdir(directory);
                const xmlFiles = files.filter((f) => f.endsWith('.xml'));
                result.scanned += xmlFiles.length;

                for (const file of xmlFiles) {
                    const filePath = join(directory, file);
                    try {
                        const template = await this.parseTemplate(filePath);
                        if (template) {
                            allTemplates.push(template);
                        }
                    } catch (error) {
                        const errorMsg = `Failed to parse template ${filePath}: ${error instanceof Error ? error.message : 'Unknown error'}`;
                        this.logger.warn(errorMsg);
                        result.errors.push(errorMsg);
                    }
                }
            } catch (error) {
                const errorMsg = `Failed to read template directory ${directory}: ${error instanceof Error ? error.message : 'Unknown error'}`;
                this.logger.warn(errorMsg);
                result.errors.push(errorMsg);
            }
        }

        return allTemplates;
    }

    private async parseTemplate(filePath: string): Promise<ParsedTemplate | null> {
        const content = await readFile(filePath, 'utf-8');
        const parsed = this.xmlParser.parse(content);

        if (!parsed.Container) {
            return null;
        }

        const container = parsed.Container;
        return {
            filePath,
            name: container.Name,
            repository: container.Repository,
        };
    }

    private matchContainerToTemplate(
        container: DockerContainer,
        templates: ParsedTemplate[]
    ): ParsedTemplate | null {
        const containerName = this.normalizeContainerName(container.names[0]);
        const containerImage = this.normalizeRepository(container.image);

        for (const template of templates) {
|
||||||
|
if (template.name && this.normalizeContainerName(template.name) === containerName) {
|
||||||
|
return template;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const template of templates) {
|
||||||
|
if (
|
||||||
|
template.repository &&
|
||||||
|
this.normalizeRepository(template.repository) === containerImage
|
||||||
|
) {
|
||||||
|
return template;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
private normalizeContainerName(name: string): string {
|
||||||
|
return name.replace(/^\//, '').toLowerCase();
|
||||||
|
}
|
||||||
|
|
||||||
|
private normalizeRepository(repository: string): string {
|
||||||
|
// Strip digest if present (e.g., image@sha256:abc123)
|
||||||
|
const [withoutDigest] = repository.split('@');
|
||||||
|
// Only remove tag if colon appears after last slash (i.e., it's a tag, not a port)
|
||||||
|
const lastColon = withoutDigest.lastIndexOf(':');
|
||||||
|
const lastSlash = withoutDigest.lastIndexOf('/');
|
||||||
|
const withoutTag = lastColon > lastSlash ? withoutDigest.slice(0, lastColon) : withoutDigest;
|
||||||
|
return withoutTag.toLowerCase();
|
||||||
|
}
|
||||||
|
|
||||||
|
private async updateMappings(mappings: Record<string, string | null>): Promise<void> {
|
||||||
|
const config = this.dockerConfigService.getConfig();
|
||||||
|
const updated = await this.dockerConfigService.validate({
|
||||||
|
...config,
|
||||||
|
templateMappings: mappings,
|
||||||
|
});
|
||||||
|
this.dockerConfigService.replaceConfig(updated);
|
||||||
|
}
|
||||||
|
}
|
||||||
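Note: the tag-versus-port distinction in normalizeRepository is the subtlest part of the matching logic above. A standalone sketch of the same rule, with hypothetical example repository strings, for illustration only:

// Same rule as normalizeRepository above, restated outside the class. A colon is treated
// as a tag separator only when it appears after the last slash, so registry hosts with
// ports survive intact while tags and digests are stripped.
const normalizeRepository = (repository: string): string => {
    const [withoutDigest] = repository.split('@');
    const lastColon = withoutDigest.lastIndexOf(':');
    const lastSlash = withoutDigest.lastIndexOf('/');
    const withoutTag = lastColon > lastSlash ? withoutDigest.slice(0, lastColon) : withoutDigest;
    return withoutTag.toLowerCase();
};

normalizeRepository('lscr.io/linuxserver/plex:latest'); // => 'lscr.io/linuxserver/plex'
normalizeRepository('registry.local:5000/plex'); // => 'registry.local:5000/plex' (port kept)
normalizeRepository('nginx@sha256:abc123'); // => 'nginx' (digest stripped)

For extractTemplatePorts, the attribute access (`@_Type`, `@_Target`, `#text`) follows fast-xml-parser-style conventions for attributes and text nodes. The markup shape it expects looks roughly like this (values hypothetical):

// Each <Config Type="Port"> yields one entry: Target is the container (private) port,
// the element text is the host (public) port, and Mode selects tcp/udp.
const exampleTemplate = `
<Container>
  <Name>plex</Name>
  <Repository>lscr.io/linuxserver/plex</Repository>
  <Config Type="Port" Target="32400" Mode="tcp">32400</Config>
</Container>`;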
@@ -1,6 +1,15 @@
import {
    Field,
    Float,
    GraphQLISODateTime,
    InputType,
    Int,
    ObjectType,
    registerEnumType,
} from '@nestjs/graphql';

import { Node } from '@unraid/shared/graphql.model.js';
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';
import { GraphQLBigInt, GraphQLJSON, GraphQLPort } from 'graphql-scalars';

export enum ContainerPortType {
@@ -27,8 +36,54 @@ export class ContainerPort {
    type!: ContainerPortType;
}

@ObjectType()
export class DockerPortConflictContainer {
    @Field(() => PrefixedID)
    id!: string;

    @Field(() => String)
    name!: string;
}

@ObjectType()
export class DockerContainerPortConflict {
    @Field(() => GraphQLPort)
    privatePort!: number;

    @Field(() => ContainerPortType)
    type!: ContainerPortType;

    @Field(() => [DockerPortConflictContainer])
    containers!: DockerPortConflictContainer[];
}

@ObjectType()
export class DockerLanPortConflict {
    @Field(() => String)
    lanIpPort!: string;

    @Field(() => GraphQLPort, { nullable: true })
    publicPort?: number;

    @Field(() => ContainerPortType)
    type!: ContainerPortType;

    @Field(() => [DockerPortConflictContainer])
    containers!: DockerPortConflictContainer[];
}

@ObjectType()
export class DockerPortConflicts {
    @Field(() => [DockerContainerPortConflict])
    containerPorts!: DockerContainerPortConflict[];

    @Field(() => [DockerLanPortConflict])
    lanPorts!: DockerLanPortConflict[];
}

export enum ContainerState {
    RUNNING = 'RUNNING',
    PAUSED = 'PAUSED',
    EXITED = 'EXITED',
}
@@ -89,12 +144,30 @@ export class DockerContainer extends Node {
    @Field(() => [ContainerPort])
    ports!: ContainerPort[];

    @Field(() => [String], {
        nullable: true,
        description: 'List of LAN-accessible host:port values',
    })
    lanIpPorts?: string[];

    @Field(() => GraphQLBigInt, {
        nullable: true,
        description: 'Total size of all files in the container (in bytes)',
    })
    sizeRootFs?: number;

    @Field(() => GraphQLBigInt, {
        nullable: true,
        description: 'Size of writable layer (in bytes)',
    })
    sizeRw?: number;

    @Field(() => GraphQLBigInt, {
        nullable: true,
        description: 'Size of container logs (in bytes)',
    })
    sizeLog?: number;

    @Field(() => GraphQLJSON, { nullable: true })
    labels?: Record<string, any>;
@@ -115,6 +188,45 @@ export class DockerContainer extends Node {
    @Field(() => Boolean)
    autoStart!: boolean;

    @Field(() => Int, { nullable: true, description: 'Zero-based order in the auto-start list' })
    autoStartOrder?: number;

    @Field(() => Int, { nullable: true, description: 'Wait time in seconds applied after start' })
    autoStartWait?: number;

    @Field(() => String, { nullable: true })
    templatePath?: string;

    @Field(() => String, { nullable: true, description: 'Project/Product homepage URL' })
    projectUrl?: string;

    @Field(() => String, { nullable: true, description: 'Registry/Docker Hub URL' })
    registryUrl?: string;

    @Field(() => String, { nullable: true, description: 'Support page/thread URL' })
    supportUrl?: string;

    @Field(() => String, { nullable: true, description: 'Icon URL' })
    iconUrl?: string;

    @Field(() => String, { nullable: true, description: 'Resolved WebUI URL from template' })
    webUiUrl?: string;

    @Field(() => String, {
        nullable: true,
        description: 'Shell to use for console access (from template)',
    })
    shell?: string;

    @Field(() => [ContainerPort], {
        nullable: true,
        description: 'Port mappings from template (used when container is not running)',
    })
    templatePorts?: ContainerPort[];

    @Field(() => Boolean, { description: 'Whether the container is orphaned (no template found)' })
    isOrphaned!: boolean;
}

@ObjectType({ implements: () => Node })
@@ -162,6 +274,127 @@ export class DockerNetwork extends Node {
    labels!: Record<string, any>;
}

@ObjectType()
export class DockerContainerLogLine {
    @Field(() => GraphQLISODateTime)
    timestamp!: Date;

    @Field(() => String)
    message!: string;
}

@ObjectType()
export class DockerContainerLogs {
    @Field(() => PrefixedID)
    containerId!: string;

    @Field(() => [DockerContainerLogLine])
    lines!: DockerContainerLogLine[];

    @Field(() => GraphQLISODateTime, {
        nullable: true,
        description:
            'Cursor that can be passed back through the since argument to continue streaming logs.',
    })
    cursor?: Date | null;
}

@ObjectType()
export class DockerContainerStats {
    @Field(() => PrefixedID)
    id!: string;

    @Field(() => Float, { description: 'CPU Usage Percentage' })
    cpuPercent!: number;

    @Field(() => String, { description: 'Memory Usage String (e.g. 100MB / 1GB)' })
    memUsage!: string;

    @Field(() => Float, { description: 'Memory Usage Percentage' })
    memPercent!: number;

    @Field(() => String, { description: 'Network I/O String (e.g. 100MB / 1GB)' })
    netIO!: string;

    @Field(() => String, { description: 'Block I/O String (e.g. 100MB / 1GB)' })
    blockIO!: string;
}

@ObjectType({ description: 'Tailscale exit node connection status' })
export class TailscaleExitNodeStatus {
    @Field(() => Boolean, { description: 'Whether the exit node is online' })
    online!: boolean;

    @Field(() => [String], { nullable: true, description: 'Tailscale IPs of the exit node' })
    tailscaleIps?: string[];
}

@ObjectType({ description: 'Tailscale status for a Docker container' })
export class TailscaleStatus {
    @Field(() => Boolean, { description: 'Whether Tailscale is online in the container' })
    online!: boolean;

    @Field(() => String, { nullable: true, description: 'Current Tailscale version' })
    version?: string;

    @Field(() => String, { nullable: true, description: 'Latest available Tailscale version' })
    latestVersion?: string;

    @Field(() => Boolean, { description: 'Whether a Tailscale update is available' })
    updateAvailable!: boolean;

    @Field(() => String, { nullable: true, description: 'Configured Tailscale hostname' })
    hostname?: string;

    @Field(() => String, { nullable: true, description: 'Actual Tailscale DNS name' })
    dnsName?: string;

    @Field(() => String, { nullable: true, description: 'DERP relay code' })
    relay?: string;

    @Field(() => String, { nullable: true, description: 'DERP relay region name' })
    relayName?: string;

    @Field(() => [String], { nullable: true, description: 'Tailscale IPv4 and IPv6 addresses' })
    tailscaleIps?: string[];

    @Field(() => [String], { nullable: true, description: 'Advertised subnet routes' })
    primaryRoutes?: string[];

    @Field(() => Boolean, { description: 'Whether this container is an exit node' })
    isExitNode!: boolean;

    @Field(() => TailscaleExitNodeStatus, {
        nullable: true,
        description: 'Status of the connected exit node (if using one)',
    })
    exitNodeStatus?: TailscaleExitNodeStatus;

    @Field(() => String, { nullable: true, description: 'Tailscale Serve/Funnel WebUI URL' })
    webUiUrl?: string;

    @Field(() => GraphQLISODateTime, { nullable: true, description: 'Tailscale key expiry date' })
    keyExpiry?: Date;

    @Field(() => Int, { nullable: true, description: 'Days until key expires' })
    keyExpiryDays?: number;

    @Field(() => Boolean, { description: 'Whether the Tailscale key has expired' })
    keyExpired!: boolean;

    @Field(() => String, {
        nullable: true,
        description: 'Tailscale backend state (Running, NeedsLogin, Stopped, etc.)',
    })
    backendState?: string;

    @Field(() => String, {
        nullable: true,
        description: 'Authentication URL if Tailscale needs login',
    })
    authUrl?: string;
}

@ObjectType({
    implements: () => Node,
})
@@ -171,4 +404,28 @@ export class Docker extends Node {
    @Field(() => [DockerNetwork])
    networks!: DockerNetwork[];

    @Field(() => DockerPortConflicts)
    portConflicts!: DockerPortConflicts;

    @Field(() => DockerContainerLogs, {
        description:
            'Access container logs. Requires specifying a target container id through resolver arguments.',
    })
    logs!: DockerContainerLogs;
}

@InputType()
export class DockerAutostartEntryInput {
    @Field(() => PrefixedID, { description: 'Docker container identifier' })
    id!: string;

    @Field(() => Boolean, { description: 'Whether the container should auto-start' })
    autoStart!: boolean;

    @Field(() => Int, {
        nullable: true,
        description: 'Number of seconds to wait after starting the container',
    })
    wait?: number | null;
}
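Note: a client query exercising the model fields declared above might look like the following sketch. The exact root query shape is defined elsewhere in the schema and is assumed here, not taken from this diff.

// Hypothetical query sketch against the new fields (isOrphaned, lanIpPorts, sizes,
// portConflicts). The `docker` root field and nesting are assumptions.
const DOCKER_OVERVIEW_QUERY = /* GraphQL */ `
    query DockerOverview {
        docker {
            id
            containers {
                id
                names
                state
                isOrphaned
                lanIpPorts
                sizeRootFs
                sizeRw
                sizeLog
            }
            portConflicts {
                containerPorts {
                    privatePort
                    type
                    containers { id name }
                }
                lanPorts {
                    lanIpPort
                    publicPort
                    type
                    containers { id name }
                }
            }
        }
    }
`;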
@@ -1,21 +1,28 @@
import { CacheModule } from '@nestjs/cache-manager';
import { Test, TestingModule } from '@nestjs/testing';

import { describe, expect, it, vi } from 'vitest';

import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerLogService } from '@app/unraid-api/graph/resolvers/docker/docker-log.service.js';
import { DockerNetworkService } from '@app/unraid-api/graph/resolvers/docker/docker-network.service.js';
import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
import { DockerPortService } from '@app/unraid-api/graph/resolvers/docker/docker-port.service.js';
import { DockerStatsService } from '@app/unraid-api/graph/resolvers/docker/docker-stats.service.js';
import { DockerTemplateScannerService } from '@app/unraid-api/graph/resolvers/docker/docker-template-scanner.service.js';
import { DockerModule } from '@app/unraid-api/graph/resolvers/docker/docker.module.js';
import { DockerMutationsResolver } from '@app/unraid-api/graph/resolvers/docker/docker.mutations.resolver.js';
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
import { SubscriptionHelperService } from '@app/unraid-api/graph/services/subscription-helper.service.js';
import { SubscriptionTrackerService } from '@app/unraid-api/graph/services/subscription-tracker.service.js';

describe('DockerModule', () => {
    it('should compile the module', async () => {
        const module = await Test.createTestingModule({
            imports: [CacheModule.register({ isGlobal: true }), DockerModule],
        })
            .overrideProvider(DockerService)
            .useValue({ getDockerClient: vi.fn() })
@@ -23,6 +30,22 @@ describe('DockerModule', () => {
            .useValue({ getConfig: vi.fn() })
            .overrideProvider(DockerConfigService)
            .useValue({ getConfig: vi.fn() })
            .overrideProvider(DockerLogService)
            .useValue({})
            .overrideProvider(DockerNetworkService)
            .useValue({})
            .overrideProvider(DockerPortService)
            .useValue({})
            .overrideProvider(SubscriptionTrackerService)
            .useValue({
                registerTopic: vi.fn(),
                subscribe: vi.fn(),
                unsubscribe: vi.fn(),
            })
            .overrideProvider(SubscriptionHelperService)
            .useValue({
                createTrackedSubscription: vi.fn(),
            })
            .compile();

        expect(module).toBeDefined();
@@ -46,25 +69,52 @@ describe('DockerModule', () => {
        expect(service).toHaveProperty('getDockerClient');
    });

    it('should provide DockerResolver', async () => {
        const module: TestingModule = await Test.createTestingModule({
            providers: [
                DockerResolver,
                { provide: DockerService, useValue: { clearContainerCache: vi.fn() } },
                {
                    provide: DockerConfigService,
                    useValue: {
                        defaultConfig: vi
                            .fn()
                            .mockReturnValue({ templateMappings: {}, skipTemplatePaths: [] }),
                        getConfig: vi
                            .fn()
                            .mockReturnValue({ templateMappings: {}, skipTemplatePaths: [] }),
                        validate: vi.fn().mockImplementation((config) => Promise.resolve(config)),
                        replaceConfig: vi.fn(),
                    },
                },
                { provide: DockerOrganizerService, useValue: {} },
                { provide: DockerPhpService, useValue: { getContainerUpdateStatuses: vi.fn() } },
                {
                    provide: DockerTemplateScannerService,
                    useValue: {
                        scanTemplates: vi.fn(),
                        syncMissingContainers: vi.fn(),
                    },
                },
                {
                    provide: DockerStatsService,
                    useValue: {
                        startStatsStream: vi.fn(),
                        stopStatsStream: vi.fn(),
                    },
                },
                {
                    provide: SubscriptionTrackerService,
                    useValue: {
                        registerTopic: vi.fn(),
                    },
                },
                {
                    provide: SubscriptionHelperService,
                    useValue: {
                        createTrackedSubscription: vi.fn(),
                    },
                },
            ],
        }).compile();
@@ -2,27 +2,44 @@ import { Module } from '@nestjs/common';

import { JobModule } from '@app/unraid-api/cron/job.module.js';
import { ContainerStatusJob } from '@app/unraid-api/graph/resolvers/docker/container-status.job.js';
import { DockerAutostartService } from '@app/unraid-api/graph/resolvers/docker/docker-autostart.service.js';
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerContainerResolver } from '@app/unraid-api/graph/resolvers/docker/docker-container.resolver.js';
import { DockerLogService } from '@app/unraid-api/graph/resolvers/docker/docker-log.service.js';
import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';
import { DockerNetworkService } from '@app/unraid-api/graph/resolvers/docker/docker-network.service.js';
import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
import { DockerPortService } from '@app/unraid-api/graph/resolvers/docker/docker-port.service.js';
import { DockerStatsService } from '@app/unraid-api/graph/resolvers/docker/docker-stats.service.js';
import { DockerTailscaleService } from '@app/unraid-api/graph/resolvers/docker/docker-tailscale.service.js';
import { DockerTemplateIconService } from '@app/unraid-api/graph/resolvers/docker/docker-template-icon.service.js';
import { DockerTemplateScannerService } from '@app/unraid-api/graph/resolvers/docker/docker-template-scanner.service.js';
import { DockerMutationsResolver } from '@app/unraid-api/graph/resolvers/docker/docker.mutations.resolver.js';
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer-config.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
import { NotificationsModule } from '@app/unraid-api/graph/resolvers/notifications/notifications.module.js';
import { ServicesModule } from '@app/unraid-api/graph/services/services.module.js';

@Module({
    imports: [JobModule, NotificationsModule, ServicesModule],
    providers: [
        // Services
        DockerService,
        DockerAutostartService,
        DockerOrganizerConfigService,
        DockerOrganizerService,
        DockerManifestService,
        DockerPhpService,
        DockerConfigService,
        DockerTemplateScannerService,
        DockerTemplateIconService,
        DockerStatsService,
        DockerTailscaleService,
        DockerLogService,
        DockerNetworkService,
        DockerPortService,

        // Jobs
        ContainerStatusJob,
@@ -45,6 +45,7 @@ describe('DockerMutationsResolver', () => {
            state: ContainerState.RUNNING,
            status: 'Up 2 hours',
            names: ['test-container'],
            isOrphaned: false,
        };
        vi.mocked(dockerService.start).mockResolvedValue(mockContainer);
@@ -65,6 +66,7 @@ describe('DockerMutationsResolver', () => {
            state: ContainerState.EXITED,
            status: 'Exited',
            names: ['test-container'],
            isOrphaned: false,
        };
        vi.mocked(dockerService.stop).mockResolvedValue(mockContainer);
@@ -4,7 +4,11 @@ import { AuthAction, Resource } from '@unraid/shared/graphql.model.js';
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';
import { UsePermissions } from '@unraid/shared/use-permissions.directive.js';

import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
import {
    DockerAutostartEntryInput,
    DockerContainer,
} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerMutations } from '@app/unraid-api/graph/resolvers/mutation/mutation.model.js';
@@ -32,4 +36,86 @@ export class DockerMutationsResolver {
    public async stop(@Args('id', { type: () => PrefixedID }) id: string) {
        return this.dockerService.stop(id);
    }

    @ResolveField(() => DockerContainer, { description: 'Pause (Suspend) a container' })
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,
    })
    public async pause(@Args('id', { type: () => PrefixedID }) id: string) {
        return this.dockerService.pause(id);
    }

    @ResolveField(() => DockerContainer, { description: 'Unpause (Resume) a container' })
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,
    })
    public async unpause(@Args('id', { type: () => PrefixedID }) id: string) {
        return this.dockerService.unpause(id);
    }

    @ResolveField(() => Boolean, { description: 'Remove a container' })
    @UsePermissions({
        action: AuthAction.DELETE_ANY,
        resource: Resource.DOCKER,
    })
    public async removeContainer(
        @Args('id', { type: () => PrefixedID }) id: string,
        @Args('withImage', { type: () => Boolean, nullable: true }) withImage?: boolean
    ) {
        return this.dockerService.removeContainer(id, { withImage });
    }

    @ResolveField(() => Boolean, {
        description: 'Update auto-start configuration for Docker containers',
    })
    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,
    })
    public async updateAutostartConfiguration(
        @Args('entries', { type: () => [DockerAutostartEntryInput] })
        entries: DockerAutostartEntryInput[],
        @Args('persistUserPreferences', { type: () => Boolean, nullable: true })
        persistUserPreferences?: boolean
    ) {
        await this.dockerService.updateAutostartConfiguration(entries, {
            persistUserPreferences,
        });
        return true;
    }

    @ResolveField(() => DockerContainer, { description: 'Update a container to the latest image' })
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,
    })
    public async updateContainer(@Args('id', { type: () => PrefixedID }) id: string) {
        return this.dockerService.updateContainer(id);
    }

    @ResolveField(() => [DockerContainer], {
        description: 'Update multiple containers to the latest images',
    })
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,
    })
    public async updateContainers(
        @Args('ids', { type: () => [PrefixedID] })
        ids: string[]
    ) {
        return this.dockerService.updateContainers(ids);
    }

    @ResolveField(() => [DockerContainer], {
        description: 'Update all containers that have available updates',
    })
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,
    })
    public async updateAllContainers() {
        return this.dockerService.updateAllContainers();
    }
}
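Note: a mutation document driving the new auto-start configuration might look like the following sketch. Nesting under a `docker` mutations namespace is assumed from the imported DockerMutations type, whose definition is outside this diff, and the example ids are hypothetical.

// Hypothetical usage sketch for updateAutostartConfiguration.
const UPDATE_AUTOSTART_MUTATION = /* GraphQL */ `
    mutation UpdateAutostart($entries: [DockerAutostartEntryInput!]!) {
        docker {
            updateAutostartConfiguration(entries: $entries, persistUserPreferences: true)
        }
    }
`;

const variables = {
    entries: [
        { id: 'container-abc123', autoStart: true, wait: 30 }, // hypothetical PrefixedID values
        { id: 'container-def456', autoStart: false },
    ],
};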
@@ -3,11 +3,20 @@ import { Test } from '@nestjs/testing';

import { beforeEach, describe, expect, it, vi } from 'vitest';

import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
import { DockerStatsService } from '@app/unraid-api/graph/resolvers/docker/docker-stats.service.js';
import { DockerTemplateScannerService } from '@app/unraid-api/graph/resolvers/docker/docker-template-scanner.service.js';
import {
    ContainerState,
    DockerContainer,
    DockerContainerLogs,
} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { DockerResolver } from '@app/unraid-api/graph/resolvers/docker/docker.resolver.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
import { SubscriptionHelperService } from '@app/unraid-api/graph/services/subscription-helper.service.js';
import { SubscriptionTrackerService } from '@app/unraid-api/graph/services/subscription-tracker.service.js';
import { GraphQLFieldHelper } from '@app/unraid-api/utils/graphql-field-helper.js';

vi.mock('@app/unraid-api/utils/graphql-field-helper.js', () => ({
@@ -29,6 +38,22 @@ describe('DockerResolver', () => {
                    useValue: {
                        getContainers: vi.fn(),
                        getNetworks: vi.fn(),
                        getContainerLogSizes: vi.fn(),
                        getContainerLogs: vi.fn(),
                        clearContainerCache: vi.fn(),
                    },
                },
                {
                    provide: DockerConfigService,
                    useValue: {
                        defaultConfig: vi
                            .fn()
                            .mockReturnValue({ templateMappings: {}, skipTemplatePaths: [] }),
                        getConfig: vi
                            .fn()
                            .mockReturnValue({ templateMappings: {}, skipTemplatePaths: [] }),
                        validate: vi.fn().mockImplementation((config) => Promise.resolve(config)),
                        replaceConfig: vi.fn(),
                    },
                },
                {
@@ -43,6 +68,39 @@ describe('DockerResolver', () => {
                        getContainerUpdateStatuses: vi.fn(),
                    },
                },
                {
                    provide: DockerTemplateScannerService,
                    useValue: {
                        scanTemplates: vi.fn().mockResolvedValue({
                            scanned: 0,
                            matched: 0,
                            skipped: 0,
                            errors: [],
                        }),
                        syncMissingContainers: vi.fn().mockResolvedValue(false),
                    },
                },
                {
                    provide: DockerStatsService,
                    useValue: {
                        startStatsStream: vi.fn(),
                        stopStatsStream: vi.fn(),
                    },
                },
                {
                    provide: SubscriptionTrackerService,
                    useValue: {
                        registerTopic: vi.fn(),
                        subscribe: vi.fn(),
                        unsubscribe: vi.fn(),
                    },
                },
                {
                    provide: SubscriptionHelperService,
                    useValue: {
                        createTrackedSubscription: vi.fn(),
                    },
                },
            ],
        }).compile();
@@ -51,6 +109,8 @@ describe('DockerResolver', () => {
        // Reset mocks before each test
        vi.clearAllMocks();
        vi.mocked(GraphQLFieldHelper.isFieldRequested).mockImplementation(() => false);
        vi.mocked(dockerService.getContainerLogSizes).mockResolvedValue(new Map());
    });

    it('should be defined', () => {
@@ -75,6 +135,7 @@ describe('DockerResolver', () => {
                ports: [],
                state: ContainerState.EXITED,
                status: 'Exited',
                isOrphaned: false,
            },
            {
                id: '2',
@@ -87,16 +148,19 @@ describe('DockerResolver', () => {
                ports: [],
                state: ContainerState.RUNNING,
                status: 'Up 2 hours',
                isOrphaned: false,
            },
        ];
        vi.mocked(dockerService.getContainers).mockResolvedValue(mockContainers);
        vi.mocked(GraphQLFieldHelper.isFieldRequested).mockImplementation(() => false);

        const mockInfo = {} as any;

        const result = await resolver.containers(false, mockInfo);
        expect(result).toEqual(mockContainers);
        expect(GraphQLFieldHelper.isFieldRequested).toHaveBeenCalledWith(mockInfo, 'sizeRootFs');
        expect(GraphQLFieldHelper.isFieldRequested).toHaveBeenCalledWith(mockInfo, 'sizeRw');
        expect(GraphQLFieldHelper.isFieldRequested).toHaveBeenCalledWith(mockInfo, 'sizeLog');
        expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: false, size: false });
    });
@@ -114,10 +178,13 @@ describe('DockerResolver', () => {
                sizeRootFs: 1024000,
                state: ContainerState.EXITED,
                status: 'Exited',
                isOrphaned: false,
            },
        ];
        vi.mocked(dockerService.getContainers).mockResolvedValue(mockContainers);
        vi.mocked(GraphQLFieldHelper.isFieldRequested).mockImplementation((_, field) => {
            return field === 'sizeRootFs';
        });

        const mockInfo = {} as any;
@@ -127,10 +194,61 @@ describe('DockerResolver', () => {
        expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: false, size: true });
    });

    it('should request size when sizeRw field is requested', async () => {
        const mockContainers: DockerContainer[] = [];
        vi.mocked(dockerService.getContainers).mockResolvedValue(mockContainers);
        vi.mocked(GraphQLFieldHelper.isFieldRequested).mockImplementation((_, field) => {
            return field === 'sizeRw';
        });

        const mockInfo = {} as any;

        await resolver.containers(false, mockInfo);
        expect(GraphQLFieldHelper.isFieldRequested).toHaveBeenCalledWith(mockInfo, 'sizeRw');
        expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: false, size: true });
    });

    it('should fetch log sizes when sizeLog field is requested', async () => {
        const mockContainers: DockerContainer[] = [
            {
                id: '1',
                autoStart: false,
                command: 'test',
                names: ['/test-container'],
                created: 1234567890,
                image: 'test-image',
                imageId: 'test-image-id',
                ports: [],
                state: ContainerState.EXITED,
                status: 'Exited',
                isOrphaned: false,
            },
        ];
        vi.mocked(dockerService.getContainers).mockResolvedValue(mockContainers);
        vi.mocked(GraphQLFieldHelper.isFieldRequested).mockImplementation((_, field) => {
            if (field === 'sizeLog') return true;
            return false;
        });

        const logSizeMap = new Map<string, number>([['test-container', 42]]);
        vi.mocked(dockerService.getContainerLogSizes).mockResolvedValue(logSizeMap);

        const mockInfo = {} as any;

        const result = await resolver.containers(false, mockInfo);

        expect(GraphQLFieldHelper.isFieldRequested).toHaveBeenCalledWith(mockInfo, 'sizeLog');
        expect(dockerService.getContainerLogSizes).toHaveBeenCalledWith(['test-container']);
        expect(result[0]?.sizeLog).toBe(42);
        expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: false, size: false });
    });

    it('should request size when GraphQLFieldHelper indicates sizeRootFs is requested', async () => {
        const mockContainers: DockerContainer[] = [];
        vi.mocked(dockerService.getContainers).mockResolvedValue(mockContainers);
        vi.mocked(GraphQLFieldHelper.isFieldRequested).mockImplementation((_, field) => {
            return field === 'sizeRootFs';
        });

        const mockInfo = {} as any;
@@ -142,7 +260,7 @@ describe('DockerResolver', () => {
    it('should not request size when GraphQLFieldHelper indicates sizeRootFs is not requested', async () => {
        const mockContainers: DockerContainer[] = [];
        vi.mocked(dockerService.getContainers).mockResolvedValue(mockContainers);
        vi.mocked(GraphQLFieldHelper.isFieldRequested).mockImplementation(() => false);

        const mockInfo = {} as any;
@@ -161,4 +279,22 @@ describe('DockerResolver', () => {
        await resolver.containers(true, mockInfo);
        expect(dockerService.getContainers).toHaveBeenCalledWith({ skipCache: true, size: false });
    });

    it('should fetch container logs with provided arguments', async () => {
        const since = new Date('2024-01-01T00:00:00.000Z');
        const logResult: DockerContainerLogs = {
            containerId: '1',
            lines: [],
            cursor: since,
        };
        vi.mocked(dockerService.getContainerLogs).mockResolvedValue(logResult);

        const result = await resolver.logs('1', since, 25);

        expect(result).toEqual(logResult);
        expect(dockerService.getContainerLogs).toHaveBeenCalledWith('1', {
            since,
            tail: 25,
        });
    });
});
@@ -1,19 +1,41 @@
import {
    Args,
    GraphQLISODateTime,
    Info,
    Int,
    Mutation,
    Query,
    ResolveField,
    Resolver,
    Subscription,
} from '@nestjs/graphql';

import type { GraphQLResolveInfo } from 'graphql';
import { AuthAction, Resource } from '@unraid/shared/graphql.model.js';
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';
import { UsePermissions } from '@unraid/shared/use-permissions.directive.js';
import { GraphQLJSON } from 'graphql-scalars';

import { PUBSUB_CHANNEL } from '@app/core/pubsub.js';
import { UseFeatureFlag } from '@app/unraid-api/decorators/use-feature-flag.decorator.js';
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerPhpService } from '@app/unraid-api/graph/resolvers/docker/docker-php.service.js';
import { DockerStatsService } from '@app/unraid-api/graph/resolvers/docker/docker-stats.service.js';
import { DockerTemplateSyncResult } from '@app/unraid-api/graph/resolvers/docker/docker-template-scanner.model.js';
import { DockerTemplateScannerService } from '@app/unraid-api/graph/resolvers/docker/docker-template-scanner.service.js';
import { ExplicitStatusItem } from '@app/unraid-api/graph/resolvers/docker/docker-update-status.model.js';
import {
    Docker,
    DockerContainer,
    DockerContainerLogs,
    DockerContainerStats,
    DockerNetwork,
    DockerPortConflicts,
} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { DockerOrganizerService } from '@app/unraid-api/graph/resolvers/docker/organizer/docker-organizer.service.js';
import { SubscriptionHelperService } from '@app/unraid-api/graph/services/subscription-helper.service.js';
import { SubscriptionTrackerService } from '@app/unraid-api/graph/services/subscription-tracker.service.js';
import { DEFAULT_ORGANIZER_ROOT_ID } from '@app/unraid-api/organizer/organizer.js';
import { ResolvedOrganizerV1 } from '@app/unraid-api/organizer/organizer.model.js';
import { GraphQLFieldHelper } from '@app/unraid-api/utils/graphql-field-helper.js';
@@ -22,9 +44,20 @@ import { GraphQLFieldHelper } from '@app/unraid-api/utils/graphql-field-helper.js';
export class DockerResolver {
    constructor(
        private readonly dockerService: DockerService,
        private readonly dockerConfigService: DockerConfigService,
        private readonly dockerOrganizerService: DockerOrganizerService,
        private readonly dockerPhpService: DockerPhpService,
        private readonly dockerTemplateScannerService: DockerTemplateScannerService,
        private readonly dockerStatsService: DockerStatsService,
        private readonly subscriptionTracker: SubscriptionTrackerService,
        private readonly subscriptionHelper: SubscriptionHelperService
    ) {
        this.subscriptionTracker.registerTopic(
            PUBSUB_CHANNEL.DOCKER_STATS,
            () => this.dockerStatsService.startStatsStream(),
            () => this.dockerStatsService.stopStatsStream()
        );
    }

    @UsePermissions({
        action: AuthAction.READ_ANY,
@@ -37,6 +70,17 @@ export class DockerResolver {
        };
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.READ_ANY,
        resource: Resource.DOCKER,
    })
    @ResolveField(() => DockerContainer, { nullable: true })
    public async container(@Args('id', { type: () => PrefixedID }) id: string) {
        const containers = await this.dockerService.getContainers({ skipCache: false });
        return containers.find((c) => c.id === id) ?? null;
    }

    @UsePermissions({
        action: AuthAction.READ_ANY,
        resource: Resource.DOCKER,
@@ -46,8 +90,47 @@
        @Args('skipCache', { defaultValue: false, type: () => Boolean }) skipCache: boolean,
        @Info() info: GraphQLResolveInfo
    ) {
        const requestsRootFsSize = GraphQLFieldHelper.isFieldRequested(info, 'sizeRootFs');
        const requestsRwSize = GraphQLFieldHelper.isFieldRequested(info, 'sizeRw');
        const requestsLogSize = GraphQLFieldHelper.isFieldRequested(info, 'sizeLog');
        const containers = await this.dockerService.getContainers({
            skipCache,
            size: requestsRootFsSize || requestsRwSize,
        });

        if (requestsLogSize) {
            const names = Array.from(
                new Set(
                    containers
                        .map((container) => container.names?.[0]?.replace(/^\//, '') || null)
                        .filter((name): name is string => Boolean(name))
                )
            );
            const logSizes = await this.dockerService.getContainerLogSizes(names);
            containers.forEach((container) => {
                const normalized = container.names?.[0]?.replace(/^\//, '') || '';
                container.sizeLog = normalized ? (logSizes.get(normalized) ?? 0) : 0;
            });
        }

        const wasSynced = await this.dockerTemplateScannerService.syncMissingContainers(containers);
        return wasSynced ? await this.dockerService.getContainers({ skipCache: true }) : containers;
    }

    @UsePermissions({
        action: AuthAction.READ_ANY,
        resource: Resource.DOCKER,
    })
    @ResolveField(() => DockerContainerLogs)
    public async logs(
        @Args('id', { type: () => PrefixedID }) id: string,
        @Args('since', { type: () => GraphQLISODateTime, nullable: true }) since?: Date | null,
        @Args('tail', { type: () => Int, nullable: true }) tail?: number | null
    ) {
        return this.dockerService.getContainerLogs(id, {
            since: since ?? undefined,
            tail,
        });
    }

    @UsePermissions({
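Note: the cursor returned by the logs field above is designed to be fed back as the since argument. A client-side polling loop could look like this sketch; the execute helper and the surrounding query shape are assumptions, not part of this diff.

// Hypothetical polling loop: each response's cursor becomes the next request's since,
// so only new lines are fetched. `execute` stands in for whatever GraphQL client is in use.
async function followLogs(execute: (q: string, v: object) => Promise<any>, id: string) {
    let since: string | null = null;
    for (;;) {
        const res = await execute(
            `query ($id: PrefixedID!, $since: DateTime, $tail: Int) {
                docker { logs(id: $id, since: $since, tail: $tail) { cursor lines { timestamp message } } }
            }`,
            { id, since, tail: since ? null : 100 } // tail only on the first fetch
        );
        const logs = res.docker.logs;
        for (const line of logs.lines) console.log(line.timestamp, line.message);
        since = logs.cursor ?? since;
        await new Promise((r) => setTimeout(r, 2000)); // poll every 2s
    }
}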
@@ -61,14 +144,27 @@
        return this.dockerService.getNetworks({ skipCache });
    }

    @UsePermissions({
        action: AuthAction.READ_ANY,
        resource: Resource.DOCKER,
    })
    @ResolveField(() => DockerPortConflicts)
    public async portConflicts(
        @Args('skipCache', { defaultValue: false, type: () => Boolean }) skipCache: boolean
    ) {
        return this.dockerService.getPortConflicts({ skipCache });
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.READ_ANY,
        resource: Resource.DOCKER,
    })
    @ResolveField(() => ResolvedOrganizerV1)
    public async organizer(
        @Args('skipCache', { defaultValue: false, type: () => Boolean }) skipCache: boolean
    ) {
        return this.dockerOrganizerService.resolveOrganizer(undefined, { skipCache });
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
@@ -107,6 +203,11 @@ export class DockerResolver {
        return this.dockerOrganizerService.resolveOrganizer(organizer);
    }

    /**
     * Deletes organizer entries (folders). When a folder is deleted, its container
     * children are automatically appended to the end of the root folder via
     * `addMissingResourcesToView`. Containers are never permanently deleted by this operation.
     */
    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
@@ -137,6 +238,80 @@ export class DockerResolver {
        return this.dockerOrganizerService.resolveOrganizer(organizer);
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,
    })
    @Mutation(() => ResolvedOrganizerV1)
    public async moveDockerItemsToPosition(
        @Args('sourceEntryIds', { type: () => [String] }) sourceEntryIds: string[],
        @Args('destinationFolderId') destinationFolderId: string,
        @Args('position', { type: () => Number }) position: number
    ) {
        const organizer = await this.dockerOrganizerService.moveItemsToPosition({
            sourceEntryIds,
            destinationFolderId,
            position,
        });
        return this.dockerOrganizerService.resolveOrganizer(organizer);
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,
    })
    @Mutation(() => ResolvedOrganizerV1)
    public async renameDockerFolder(
        @Args('folderId') folderId: string,
        @Args('newName') newName: string
    ) {
        const organizer = await this.dockerOrganizerService.renameFolderById({
            folderId,
            newName,
        });
        return this.dockerOrganizerService.resolveOrganizer(organizer);
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,
    })
    @Mutation(() => ResolvedOrganizerV1)
    public async createDockerFolderWithItems(
        @Args('name') name: string,
        @Args('parentId', { nullable: true }) parentId?: string,
        @Args('sourceEntryIds', { type: () => [String], nullable: true }) sourceEntryIds?: string[],
        @Args('position', { type: () => Number, nullable: true }) position?: number
    ) {
        const organizer = await this.dockerOrganizerService.createFolderWithItems({
            name,
            parentId: parentId ?? DEFAULT_ORGANIZER_ROOT_ID,
            sourceEntryIds: sourceEntryIds ?? [],
            position,
        });
        return this.dockerOrganizerService.resolveOrganizer(organizer);
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,
    })
    @Mutation(() => ResolvedOrganizerV1)
    public async updateDockerViewPreferences(
        @Args('viewId', { nullable: true, defaultValue: 'default' }) viewId: string,
        @Args('prefs', { type: () => GraphQLJSON }) prefs: Record<string, unknown>
    ) {
        const organizer = await this.dockerOrganizerService.updateViewPreferences({
            viewId,
            prefs,
        });
        return this.dockerOrganizerService.resolveOrganizer(organizer);
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.READ_ANY,
@@ -146,4 +321,48 @@ export class DockerResolver {
    public async containerUpdateStatuses() {
        return this.dockerPhpService.getContainerUpdateStatuses();
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,
    })
    @Mutation(() => DockerTemplateSyncResult)
    public async syncDockerTemplatePaths() {
        return this.dockerTemplateScannerService.scanTemplates();
    }

    @UseFeatureFlag('ENABLE_NEXT_DOCKER_RELEASE')
    @UsePermissions({
        action: AuthAction.UPDATE_ANY,
        resource: Resource.DOCKER,
    })
    @Mutation(() => Boolean, {
        description:
            'Reset Docker template mappings to defaults. Use this to recover from corrupted state.',
    })
    public async resetDockerTemplateMappings(): Promise<boolean> {
        const defaultConfig = this.dockerConfigService.defaultConfig();
        const currentConfig = this.dockerConfigService.getConfig();
        const resetConfig = {
            ...currentConfig,
            templateMappings: defaultConfig.templateMappings,
            skipTemplatePaths: defaultConfig.skipTemplatePaths,
        };
        const validated = await this.dockerConfigService.validate(resetConfig);
        this.dockerConfigService.replaceConfig(validated);
        await this.dockerService.clearContainerCache();
        return true;
    }

    @UsePermissions({
        action: AuthAction.READ_ANY,
        resource: Resource.DOCKER,
    })
    @Subscription(() => DockerContainerStats, {
        resolve: (payload) => payload.dockerContainerStats,
    })
    public dockerContainerStats() {
        return this.subscriptionHelper.createTrackedSubscription(PUBSUB_CHANNEL.DOCKER_STATS);
    }
}
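The resetDockerTemplateMappings mutation above returns a bare Boolean, so recovering from a corrupted template-mapping state is a single call. A client-side sketch follows; the /graphql path and the x-api-key header are assumptions about the deployment, not something this diff confirms:

// Sketch: invoke the reset mutation over plain HTTP (endpoint URL and auth header assumed).
const response = await fetch('http://tower.local/graphql', {
    method: 'POST',
    headers: {
        'Content-Type': 'application/json',
        'x-api-key': process.env.UNRAID_API_KEY ?? '',
    },
    body: JSON.stringify({ query: 'mutation { resetDockerTemplateMappings }' }),
});
console.log(await response.json()); // expected: { data: { resetDockerTemplateMappings: true } }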
@@ -0,0 +1,169 @@
import { CACHE_MANAGER } from '@nestjs/cache-manager';
import { Test, TestingModule } from '@nestjs/testing';
import { mkdtemp, readFile, rm } from 'fs/promises';
import { tmpdir } from 'os';
import { join } from 'path';

import { afterAll, beforeAll, describe, expect, it, vi } from 'vitest';

import { DockerAutostartService } from '@app/unraid-api/graph/resolvers/docker/docker-autostart.service.js';
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerLogService } from '@app/unraid-api/graph/resolvers/docker/docker-log.service.js';
import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';
import { DockerNetworkService } from '@app/unraid-api/graph/resolvers/docker/docker-network.service.js';
import { DockerPortService } from '@app/unraid-api/graph/resolvers/docker/docker-port.service.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { NotificationsService } from '@app/unraid-api/graph/resolvers/notifications/notifications.service.js';

// Mock dependencies that are not the focus of this integration test
const mockNotificationsService = {
    notifyIfUnique: vi.fn(),
};

const mockDockerConfigService = {
    getConfig: vi.fn().mockReturnValue({ templateMappings: {} }),
};

const mockDockerManifestService = {
    getCachedUpdateStatuses: vi.fn().mockResolvedValue({}),
    isUpdateAvailableCached: vi.fn().mockResolvedValue(false),
};

const mockCacheManager = {
    get: vi.fn(),
    set: vi.fn(),
    del: vi.fn(),
};

// Hoisted mock for paths
const { mockPaths } = vi.hoisted(() => ({
    mockPaths: {
        'docker-autostart': '',
        'docker-userprefs': '',
        'docker-socket': '/var/run/docker.sock',
    },
}));

vi.mock('@app/store/index.js', () => ({
    getters: {
        paths: () => mockPaths,
        emhttp: () => ({ networks: [] }),
    },
}));

// Check for Docker availability
let dockerAvailable = false;
try {
    const Docker = (await import('dockerode')).default;
    const docker = new Docker({ socketPath: '/var/run/docker.sock' });
    await docker.ping();
    dockerAvailable = true;
} catch {
    console.warn('Docker not available or not accessible at /var/run/docker.sock');
}

describe.runIf(dockerAvailable)('DockerService Integration', () => {
    let service: DockerService;
    let autostartService: DockerAutostartService;
    let module: TestingModule;
    let tempDir: string;

    beforeAll(async () => {
        // Set up a temp dir for config files
        tempDir = await mkdtemp(join(tmpdir(), 'unraid-api-docker-test-'));
        mockPaths['docker-autostart'] = join(tempDir, 'docker-autostart');
        mockPaths['docker-userprefs'] = join(tempDir, 'docker-userprefs');

        module = await Test.createTestingModule({
            providers: [
                DockerService,
                DockerAutostartService,
                DockerLogService,
                DockerNetworkService,
                DockerPortService,
                { provide: CACHE_MANAGER, useValue: mockCacheManager },
                { provide: DockerConfigService, useValue: mockDockerConfigService },
                { provide: DockerManifestService, useValue: mockDockerManifestService },
                { provide: NotificationsService, useValue: mockNotificationsService },
            ],
        }).compile();

        service = module.get<DockerService>(DockerService);
        autostartService = module.get<DockerAutostartService>(DockerAutostartService);
    });

    afterAll(async () => {
        if (tempDir) {
            await rm(tempDir, { recursive: true, force: true });
        }
    });

    it('should fetch containers from docker daemon', async () => {
        const containers = await service.getContainers({ skipCache: true });
        expect(Array.isArray(containers)).toBe(true);
        if (containers.length > 0) {
            expect(containers[0]).toHaveProperty('id');
            expect(containers[0]).toHaveProperty('names');
            expect(containers[0].state).toBeDefined();
        }
    });

    it('should fetch networks from docker daemon', async () => {
        const networks = await service.getNetworks({ skipCache: true });
        expect(Array.isArray(networks)).toBe(true);
        // Default networks (bridge, host, null) should always exist
        expect(networks.length).toBeGreaterThan(0);
        const bridge = networks.find((n) => n.name === 'bridge');
        expect(bridge).toBeDefined();
    });

    it('should manage autostart configuration in temp files', async () => {
        const containers = await service.getContainers({ skipCache: true });
        if (containers.length === 0) {
            console.warn('No containers found, skipping autostart write test');
            return;
        }

        const target = containers[0];
        // Ensure the name is valid for the autostart file (strip leading /)
        const primaryName = autostartService.getContainerPrimaryName(target as any);
        expect(primaryName).toBeTruthy();

        const entry = {
            id: target.id,
            autoStart: true,
            wait: 10,
        };

        await service.updateAutostartConfiguration([entry], { persistUserPreferences: true });

        // Verify file content
        try {
            const content = await readFile(mockPaths['docker-autostart'], 'utf8');
            expect(content).toContain(primaryName);
            expect(content).toContain('10');
        } catch (error: any) {
            // If the file doesn't exist, the write logic may have skipped it (e.g. a name issue),
            // but we expect a write when a container exists and the entry is valid.
            throw new Error(`Failed to read autostart file: ${error.message}`);
        }
    });

    it('should get container logs using dockerode', async () => {
        const containers = await service.getContainers({ skipCache: true });
        const running = containers.find((c) => c.state === 'RUNNING'); // Enum value is the string 'RUNNING'

        if (!running) {
            console.warn('No running containers found, skipping log test');
            return;
        }

        // This test verifies that the execa -> dockerode switch works for logs.
        // If it fails, the log parsing or dockerode interaction is likely wrong.
        const logs = await service.getContainerLogs(running.id, { tail: 10 });
        expect(logs).toBeDefined();
        expect(logs.containerId).toBe(running.id);
        expect(Array.isArray(logs.lines)).toBe(true);
        // We can't guarantee lines.length > 0 if the container is silent, but it shouldn't throw.
    });
});
@@ -7,8 +7,15 @@ import { beforeEach, describe, expect, it, vi } from 'vitest';

// Import the mocked pubsub parts
import { pubsub, PUBSUB_CHANNEL } from '@app/core/pubsub.js';
import { DockerAutostartService } from '@app/unraid-api/graph/resolvers/docker/docker-autostart.service.js';
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerLogService } from '@app/unraid-api/graph/resolvers/docker/docker-log.service.js';
import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';
import { DockerNetworkService } from '@app/unraid-api/graph/resolvers/docker/docker-network.service.js';
import { DockerPortService } from '@app/unraid-api/graph/resolvers/docker/docker-port.service.js';
import { ContainerState, DockerContainer } from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { DockerService } from '@app/unraid-api/graph/resolvers/docker/docker.service.js';
import { NotificationsService } from '@app/unraid-api/graph/resolvers/notifications/notifications.service.js';

// Mock pubsub
vi.mock('@app/core/pubsub.js', () => ({
@@ -24,36 +31,58 @@ interface DockerError extends NodeJS.ErrnoException {
    address: string;
}

const { mockDockerInstance, mockListContainers, mockGetContainer, mockListNetworks, mockContainer } =
    vi.hoisted(() => {
        const mockContainer = {
            start: vi.fn(),
            stop: vi.fn(),
            pause: vi.fn(),
            unpause: vi.fn(),
            inspect: vi.fn(),
        };

        const mockListContainers = vi.fn();
        const mockGetContainer = vi.fn().mockReturnValue(mockContainer);
        const mockListNetworks = vi.fn();

        const mockDockerInstance = {
            getContainer: mockGetContainer,
            listContainers: mockListContainers,
            listNetworks: mockListNetworks,
            modem: {
                Promise: Promise,
                protocol: 'http',
                socketPath: '/var/run/docker.sock',
                headers: {},
                sshOptions: {
                    agentForward: undefined,
                },
            },
        } as unknown as Docker;

        return {
            mockDockerInstance,
            mockListContainers,
            mockGetContainer,
            mockListNetworks,
            mockContainer,
        };
    });

vi.mock('@app/unraid-api/graph/resolvers/docker/utils/docker-client.js', () => ({
    getDockerClient: vi.fn().mockReturnValue(mockDockerInstance),
}));

vi.mock('execa', () => ({
    execa: vi.fn(),
}));

const { mockEmhttpGetter } = vi.hoisted(() => ({
    mockEmhttpGetter: vi.fn().mockReturnValue({
        networks: [],
        var: {},
    }),
}));

// Mock the store getters
vi.mock('@app/store/index.js', () => ({
@@ -61,15 +90,21 @@ vi.mock('@app/store/index.js', () => ({
        docker: vi.fn().mockReturnValue({ containers: [] }),
        paths: vi.fn().mockReturnValue({
            'docker-autostart': '/path/to/docker-autostart',
            'docker-userprefs': '/path/to/docker-userprefs',
            'docker-socket': '/var/run/docker.sock',
            'var-run': '/var/run',
        }),
        emhttp: mockEmhttpGetter,
    },
}));

// Mock fs/promises (stat only)
const { statMock } = vi.hoisted(() => ({
    statMock: vi.fn().mockResolvedValue({ size: 0 }),
}));

vi.mock('fs/promises', () => ({
    stat: statMock,
}));

// Mock Cache Manager
@@ -79,6 +114,67 @@ const mockCacheManager = {
    del: vi.fn(),
};

// Mock DockerConfigService
const mockDockerConfigService = {
    getConfig: vi.fn().mockReturnValue({
        updateCheckCronSchedule: '0 6 * * *',
        templateMappings: {},
        skipTemplatePaths: [],
    }),
    replaceConfig: vi.fn(),
    validate: vi.fn((config) => Promise.resolve(config)),
};

const mockDockerManifestService = {
    refreshDigests: vi.fn().mockResolvedValue(true),
    getCachedUpdateStatuses: vi.fn().mockResolvedValue({}),
    isUpdateAvailableCached: vi.fn().mockResolvedValue(false),
};

// Mock NotificationsService
const mockNotificationsService = {
    notifyIfUnique: vi.fn().mockResolvedValue(null),
};

// Mock DockerAutostartService
const mockDockerAutostartService = {
    refreshAutoStartEntries: vi.fn().mockResolvedValue(undefined),
    getAutoStarts: vi.fn().mockResolvedValue([]),
    getContainerPrimaryName: vi.fn((c) => {
        if ('Names' in c) return c.Names[0]?.replace(/^\//, '') || null;
        if ('names' in c) return c.names[0]?.replace(/^\//, '') || null;
        return null;
    }),
    getAutoStartEntry: vi.fn(),
    updateAutostartConfiguration: vi.fn().mockResolvedValue(undefined),
};

// Mock new services
const mockDockerLogService = {
    getContainerLogSizes: vi.fn().mockResolvedValue(new Map([['test-container', 1024]])),
    getContainerLogs: vi.fn().mockResolvedValue({ lines: [], cursor: null }),
};

const mockDockerNetworkService = {
    getNetworks: vi.fn().mockResolvedValue([]),
};

// Use a real-ish mock for DockerPortService since it is used in transformContainer
const mockDockerPortService = {
    deduplicateContainerPorts: vi.fn((ports) => {
        if (!ports) return [];
        // Simple dedupe logic for test
        const seen = new Set();
        return ports.filter((p) => {
            const key = `${p.PrivatePort}-${p.PublicPort}-${p.Type}`;
            if (seen.has(key)) return false;
            seen.add(key);
            return true;
        });
    }),
    calculateConflicts: vi.fn().mockReturnValue({ containerPorts: [], lanPorts: [] }),
};

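For reference, the dedupe key in the mock above is `${PrivatePort}-${PublicPort}-${Type}`, so two bindings that differ only in the bound IP (the dual-stack 0.0.0.0 vs :: case) collapse to one entry. A quick illustration, runnable against the mock exactly as defined:

// Both rows share the key '8080-8080-tcp'; only the first survives.
const sample = [
    { IP: '0.0.0.0', PrivatePort: 8080, PublicPort: 8080, Type: 'tcp' },
    { IP: '::', PrivatePort: 8080, PublicPort: 8080, Type: 'tcp' },
];
console.log(mockDockerPortService.deduplicateContainerPorts(sample)); // => one tcp entry for 8080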
describe('DockerService', () => {
    let service: DockerService;

@@ -88,9 +184,41 @@ describe('DockerService', () => {
        mockListNetworks.mockReset();
        mockContainer.start.mockReset();
        mockContainer.stop.mockReset();
        mockContainer.pause.mockReset();
        mockContainer.unpause.mockReset();
        mockContainer.inspect.mockReset();

        mockCacheManager.get.mockReset();
        mockCacheManager.set.mockReset();
        mockCacheManager.del.mockReset();
        statMock.mockReset();
        statMock.mockResolvedValue({ size: 0 });

        mockEmhttpGetter.mockReset();
        mockEmhttpGetter.mockReturnValue({
            networks: [],
            var: {},
        });
        mockDockerConfigService.getConfig.mockReturnValue({
            updateCheckCronSchedule: '0 6 * * *',
            templateMappings: {},
            skipTemplatePaths: [],
        });
        mockDockerManifestService.refreshDigests.mockReset();
        mockDockerManifestService.refreshDigests.mockResolvedValue(true);

        mockDockerAutostartService.refreshAutoStartEntries.mockReset();
        mockDockerAutostartService.getAutoStarts.mockReset();
        mockDockerAutostartService.getAutoStartEntry.mockReset();
        mockDockerAutostartService.updateAutostartConfiguration.mockReset();

        mockDockerLogService.getContainerLogSizes.mockReset();
        mockDockerLogService.getContainerLogSizes.mockResolvedValue(new Map([['test-container', 1024]]));
        mockDockerLogService.getContainerLogs.mockReset();

        mockDockerNetworkService.getNetworks.mockReset();
        mockDockerPortService.deduplicateContainerPorts.mockClear();
        mockDockerPortService.calculateConflicts.mockReset();

        const module: TestingModule = await Test.createTestingModule({
            providers: [
@@ -99,6 +227,34 @@ describe('DockerService', () => {
                    provide: CACHE_MANAGER,
                    useValue: mockCacheManager,
                },
                {
                    provide: DockerConfigService,
                    useValue: mockDockerConfigService,
                },
                {
                    provide: DockerManifestService,
                    useValue: mockDockerManifestService,
                },
                {
                    provide: NotificationsService,
                    useValue: mockNotificationsService,
                },
                {
                    provide: DockerAutostartService,
                    useValue: mockDockerAutostartService,
                },
                {
                    provide: DockerLogService,
                    useValue: mockDockerLogService,
                },
                {
                    provide: DockerNetworkService,
                    useValue: mockDockerNetworkService,
                },
                {
                    provide: DockerPortService,
                    useValue: mockDockerPortService,
                },
            ],
        }).compile();

@@ -109,65 +265,6 @@ describe('DockerService', () => {
        expect(service).toBeDefined();
    });

    it('should get containers', async () => {
        const mockContainers = [
            {
@@ -190,308 +287,100 @@ describe('DockerService', () => {
        ];

        mockListContainers.mockResolvedValue(mockContainers);
        mockCacheManager.get.mockResolvedValue(undefined);

        const result = await service.getContainers({ skipCache: true });

        expect(result).toEqual(
            expect.arrayContaining([
                expect.objectContaining({
                    id: 'abc123def456',
                    names: ['/test-container'],
                }),
            ])
        );

        expect(mockListContainers).toHaveBeenCalled();
        expect(mockDockerAutostartService.refreshAutoStartEntries).toHaveBeenCalled();
        expect(mockDockerPortService.deduplicateContainerPorts).toHaveBeenCalled();
    });

    it('should update auto-start configuration', async () => {
        mockListContainers.mockResolvedValue([
            {
                Id: 'abc123',
                Names: ['/alpha'],
                State: 'running',
            },
        ]);

        const input = [{ id: 'abc123', autoStart: true, wait: 15 }];
        await service.updateAutostartConfiguration(input, { persistUserPreferences: true });

        expect(mockDockerAutostartService.updateAutostartConfiguration).toHaveBeenCalledWith(
            input,
            expect.any(Array),
            { persistUserPreferences: true }
        );
        expect(mockCacheManager.del).toHaveBeenCalledWith(DockerService.CONTAINER_CACHE_KEY);
    });

    it('should delegate getContainerLogSizes to DockerLogService', async () => {
        const sizes = await service.getContainerLogSizes(['test-container']);
        expect(mockDockerLogService.getContainerLogSizes).toHaveBeenCalledWith(['test-container']);
        expect(sizes.get('test-container')).toBe(1024);
    });

    describe('getAppInfo', () => {
        const mockContainersForMethods = [
            { id: 'abc1', state: ContainerState.RUNNING },
            { id: 'def2', state: ContainerState.EXITED },
        ] as DockerContainer[];

        it('should return correct app info object', async () => {
            mockCacheManager.get.mockResolvedValue(mockContainersForMethods);

            const result = await service.getAppInfo();
            expect(result).toEqual({
                info: {
                    apps: { installed: 2, running: 1 },
                },
            });
            expect(mockCacheManager.get).toHaveBeenCalledWith(DockerService.CONTAINER_CACHE_KEY);
        });
    });

    describe('transformContainer', () => {
        it('deduplicates ports that only differ by bound IP addresses', () => {
            mockEmhttpGetter.mockReturnValue({
                networks: [{ ipaddr: ['192.168.0.10'] }],
                var: {},
            });

            const container = {
                Id: 'duplicate-ports',
                Names: ['/duplicate-ports'],
                Image: 'test-image',
                ImageID: 'sha256:123',
                Command: 'test',
                Created: 1700000000,
                State: 'running',
                Status: 'Up 2 hours',
                Ports: [
                    { IP: '0.0.0.0', PrivatePort: 8080, PublicPort: 8080, Type: 'tcp' },
                    { IP: '::', PrivatePort: 8080, PublicPort: 8080, Type: 'tcp' },
                    { IP: '0.0.0.0', PrivatePort: 5000, PublicPort: 5000, Type: 'udp' },
                ],
                Labels: {},
                HostConfig: { NetworkMode: 'bridge' },
                NetworkSettings: { Networks: {} },
                Mounts: [],
            } as Docker.ContainerInfo;

            service.transformContainer(container);
            expect(mockDockerPortService.deduplicateContainerPorts).toHaveBeenCalledWith(
                container.Ports
            );
        });
    });
});
@@ -1,20 +1,30 @@
import { CACHE_MANAGER } from '@nestjs/cache-manager';
import { Inject, Injectable, Logger } from '@nestjs/common';

import { type Cache } from 'cache-manager';
import Docker from 'dockerode';
import { execa } from 'execa';

import { pubsub, PUBSUB_CHANNEL } from '@app/core/pubsub.js';
import { catchHandlers } from '@app/core/utils/misc/catch-handlers.js';
import { sleep } from '@app/core/utils/misc/sleep.js';
import { getLanIp } from '@app/core/utils/network.js';
import { DockerAutostartService } from '@app/unraid-api/graph/resolvers/docker/docker-autostart.service.js';
import { DockerConfigService } from '@app/unraid-api/graph/resolvers/docker/docker-config.service.js';
import { DockerLogService } from '@app/unraid-api/graph/resolvers/docker/docker-log.service.js';
import { DockerManifestService } from '@app/unraid-api/graph/resolvers/docker/docker-manifest.service.js';
import { DockerNetworkService } from '@app/unraid-api/graph/resolvers/docker/docker-network.service.js';
import { DockerPortService } from '@app/unraid-api/graph/resolvers/docker/docker-port.service.js';
import {
    ContainerPortType,
    ContainerState,
    DockerAutostartEntryInput,
    DockerContainer,
    DockerContainerLogs,
    DockerNetwork,
    DockerPortConflicts,
} from '@app/unraid-api/graph/resolvers/docker/docker.model.js';
import { getDockerClient } from '@app/unraid-api/graph/resolvers/docker/utils/docker-client.js';

interface ContainerListingOptions extends Docker.ContainerListOptions {
    skipCache: boolean;
@@ -27,25 +37,26 @@ interface NetworkListingOptions {
@Injectable()
export class DockerService {
    private client: Docker;
    private readonly logger = new Logger(DockerService.name);

    public static readonly CONTAINER_CACHE_KEY = 'docker_containers';
    public static readonly CONTAINER_WITH_SIZE_CACHE_KEY = 'docker_containers_with_size';
    public static readonly NETWORK_CACHE_KEY = 'docker_networks';
    public static readonly CACHE_TTL_SECONDS = 60;

    constructor(
        @Inject(CACHE_MANAGER) private cacheManager: Cache,
        private readonly dockerConfigService: DockerConfigService,
        private readonly dockerManifestService: DockerManifestService,
        private readonly autostartService: DockerAutostartService,
        private readonly dockerLogService: DockerLogService,
        private readonly dockerNetworkService: DockerNetworkService,
        private readonly dockerPortService: DockerPortService
    ) {
        this.client = getDockerClient();
    }

    public async getAppInfo() {
        const containers = await this.getContainers({ skipCache: false });
        const installedCount = containers.length;
        const runningCount = containers.filter(
@@ -65,31 +76,47 @@ export class DockerService {
     * @see https://github.com/limetech/webgui/issues/502#issue-480992547
     */
    public async getAutoStarts(): Promise<string[]> {
        return this.autostartService.getAutoStarts();
    }

    public transformContainer(container: Docker.ContainerInfo): Omit<DockerContainer, 'isOrphaned'> {
        const sizeValue = (container as Docker.ContainerInfo & { SizeRootFs?: number }).SizeRootFs;
        const primaryName = this.autostartService.getContainerPrimaryName(container) ?? '';
        const autoStartEntry = primaryName
            ? this.autostartService.getAutoStartEntry(primaryName)
            : undefined;
        const lanIp = getLanIp();
        const lanPortStrings: string[] = [];
        const uniquePorts = this.dockerPortService.deduplicateContainerPorts(container.Ports);

        const transformedPorts = uniquePorts.map((port) => {
            if (port.PublicPort) {
                const lanPort = lanIp ? `${lanIp}:${port.PublicPort}` : `${port.PublicPort}`;
                if (lanPort) {
                    lanPortStrings.push(lanPort);
                }
            }
            return {
                ip: port.IP || '',
                privatePort: port.PrivatePort,
                publicPort: port.PublicPort,
                type:
                    ContainerPortType[
                        (port.Type || 'tcp').toUpperCase() as keyof typeof ContainerPortType
                    ] || ContainerPortType.TCP,
            };
        });

        const transformed: Omit<DockerContainer, 'isOrphaned'> = {
            id: container.Id,
            names: container.Names,
            image: container.Image,
            imageId: container.ImageID,
            command: container.Command,
            created: container.Created,
            ports: transformedPorts,
            sizeRootFs: sizeValue,
            sizeRw: (container as Docker.ContainerInfo & { SizeRw?: number }).SizeRw,
            labels: container.Labels ?? {},
            state:
                typeof container.State === 'string'
@@ -102,9 +129,15 @@ export class DockerService {
            },
            networkSettings: container.NetworkSettings,
            mounts: container.Mounts,
            autoStart: Boolean(autoStartEntry),
            autoStartOrder: autoStartEntry?.order,
            autoStartWait: autoStartEntry?.wait,
        };

        if (lanPortStrings.length > 0) {
            transformed.lanIpPorts = lanPortStrings;
        }

        return transformed;
    }

@@ -129,66 +162,65 @@ export class DockerService {
        }

        this.logger.debug(`Updating docker container cache (${size ? 'with' : 'without'} size)`);
        let rawContainers: Docker.ContainerInfo[] = [];
        try {
            rawContainers = await this.client.listContainers({
                all,
                size,
                ...listOptions,
            });
        } catch (error) {
            this.handleDockerListError(error);
        }

        await this.autostartService.refreshAutoStartEntries();
        const containers = rawContainers.map((container) => this.transformContainer(container));

        const config = this.dockerConfigService.getConfig();
        const containersWithTemplatePaths = containers.map((c) => {
            const containerName = c.names[0]?.replace(/^\//, '').toLowerCase() ?? '';
            const templatePath = config.templateMappings?.[containerName] || undefined;
            return {
                ...c,
                templatePath,
                isOrphaned: !templatePath,
            };
        });

        await this.cacheManager.set(
            cacheKey,
            containersWithTemplatePaths,
            DockerService.CACHE_TTL_SECONDS * 1000
        );
        return containersWithTemplatePaths;
    }
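One detail worth noting in the caching block above: template lookups are keyed by the container's first name, lowercased and stripped of its leading slash, and any container without a mapping is flagged isOrphaned. A small self-contained illustration (the mapping value is a hypothetical path, not one taken from this diff):

// Hypothetical templateMappings entry; real values come from DockerConfigService.
const templateMappings: Record<string, string> = {
    plex: '/boot/config/plugins/dockerMan/templates-user/plex.xml',
};

const names = ['/Plex', '/unmapped-container'];
const flags = names.map((raw) => {
    const key = raw.replace(/^\//, '').toLowerCase();
    const templatePath = templateMappings[key] || undefined;
    return { name: key, templatePath, isOrphaned: !templatePath };
});
// => plex resolves to its template; unmapped-container is marked isOrphaned: true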

    public async getPortConflicts({
        skipCache = false,
    }: {
        skipCache?: boolean;
    } = {}): Promise<DockerPortConflicts> {
        const containers = await this.getContainers({ skipCache });
        return this.dockerPortService.calculateConflicts(containers);
    }

    public async getContainerLogSizes(containerNames: string[]): Promise<Map<string, number>> {
        return this.dockerLogService.getContainerLogSizes(containerNames);
    }

    public async getContainerLogs(
        id: string,
        options?: { since?: Date | null; tail?: number | null }
    ): Promise<DockerContainerLogs> {
        return this.dockerLogService.getContainerLogs(id, options);
    }

    /**
     * Get all Docker networks
     * @returns All the in/active Docker networks on the system.
     */
    public async getNetworks(options: NetworkListingOptions): Promise<DockerNetwork[]> {
        return this.dockerNetworkService.getNetworks(options);
    }

    public async clearContainerCache(): Promise<void> {
@@ -214,6 +246,45 @@ export class DockerService {
        return updatedContainer;
    }

    public async removeContainer(id: string, options?: { withImage?: boolean }): Promise<boolean> {
        const container = this.client.getContainer(id);
        try {
            const inspectData = options?.withImage ? await container.inspect() : null;
            const imageId = inspectData?.Image;

            await container.remove({ force: true });
            this.logger.debug(`Removed container ${id}`);

            if (options?.withImage && imageId) {
                try {
                    const image = this.client.getImage(imageId);
                    await image.remove({ force: true });
                    this.logger.debug(`Removed image ${imageId} for container ${id}`);
                } catch (imageError) {
                    this.logger.warn(`Failed to remove image ${imageId}:`, imageError);
                }
            }

            await this.clearContainerCache();
            this.logger.debug(`Invalidated container caches after removing ${id}`);
            const appInfo = await this.getAppInfo();
            await pubsub.publish(PUBSUB_CHANNEL.INFO, appInfo);
            return true;
        } catch (error) {
            this.logger.error(`Failed to remove container ${id}:`, error);
            throw new Error(`Failed to remove container ${id}`);
        }
    }

    public async updateAutostartConfiguration(
        entries: DockerAutostartEntryInput[],
        options?: { persistUserPreferences?: boolean }
    ): Promise<void> {
        const containers = await this.getContainers({ skipCache: true });
        await this.autostartService.updateAutostartConfiguration(entries, containers, options);
        await this.clearContainerCache();
    }
public async stop(id: string): Promise<DockerContainer> {
|
public async stop(id: string): Promise<DockerContainer> {
|
||||||
const container = this.client.getContainer(id);
|
const container = this.client.getContainer(id);
|
||||||
await container.stop({ t: 10 });
|
await container.stop({ t: 10 });
|
||||||
@@ -243,4 +314,162 @@ export class DockerService {
|
|||||||
await pubsub.publish(PUBSUB_CHANNEL.INFO, appInfo);
|
await pubsub.publish(PUBSUB_CHANNEL.INFO, appInfo);
|
||||||
return updatedContainer;
|
return updatedContainer;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public async pause(id: string): Promise<DockerContainer> {
|
||||||
|
const container = this.client.getContainer(id);
|
||||||
|
await container.pause();
|
||||||
|
await this.cacheManager.del(DockerService.CONTAINER_CACHE_KEY);
|
||||||
|
this.logger.debug(`Invalidated container cache after pausing ${id}`);
|
||||||
|
|
||||||
|
let containers: DockerContainer[];
|
||||||
|
let updatedContainer: DockerContainer | undefined;
|
||||||
|
for (let i = 0; i < 5; i++) {
|
||||||
|
await sleep(500);
|
||||||
|
containers = await this.getContainers({ skipCache: true });
|
||||||
|
updatedContainer = containers.find((c) => c.id === id);
|
||||||
|
this.logger.debug(
|
||||||
|
`Container ${id} state after pause attempt ${i + 1}: ${updatedContainer?.state}`
|
||||||
|
);
|
||||||
|
if (updatedContainer?.state === ContainerState.PAUSED) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!updatedContainer) {
|
||||||
|
throw new Error(`Container ${id} not found after pausing`);
|
||||||
|
}
|
||||||
|
const appInfo = await this.getAppInfo();
|
||||||
|
await pubsub.publish(PUBSUB_CHANNEL.INFO, appInfo);
|
||||||
|
return updatedContainer;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async unpause(id: string): Promise<DockerContainer> {
|
||||||
|
const container = this.client.getContainer(id);
|
||||||
|
await container.unpause();
|
||||||
|
await this.cacheManager.del(DockerService.CONTAINER_CACHE_KEY);
|
||||||
|
this.logger.debug(`Invalidated container cache after unpausing ${id}`);
|
||||||
|
|
||||||
|
let containers: DockerContainer[];
|
||||||
|
let updatedContainer: DockerContainer | undefined;
|
||||||
|
for (let i = 0; i < 5; i++) {
|
||||||
|
await sleep(500);
|
||||||
|
containers = await this.getContainers({ skipCache: true });
|
||||||
|
updatedContainer = containers.find((c) => c.id === id);
|
||||||
|
this.logger.debug(
|
||||||
|
`Container ${id} state after unpause attempt ${i + 1}: ${updatedContainer?.state}`
|
||||||
|
);
|
||||||
|
if (updatedContainer?.state === ContainerState.RUNNING) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!updatedContainer) {
|
||||||
|
throw new Error(`Container ${id} not found after unpausing`);
|
||||||
|
}
|
||||||
|
const appInfo = await this.getAppInfo();
|
||||||
|
await pubsub.publish(PUBSUB_CHANNEL.INFO, appInfo);
|
||||||
|
return updatedContainer;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async updateContainer(id: string): Promise<DockerContainer> {
|
||||||
|
const containers = await this.getContainers({ skipCache: true });
|
||||||
|
const container = containers.find((c) => c.id === id);
|
||||||
|
if (!container) {
|
||||||
|
throw new Error(`Container ${id} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const containerName = container.names?.[0]?.replace(/^\//, '');
|
||||||
|
if (!containerName) {
|
||||||
|
throw new Error(`Container ${id} has no name`);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.log(`Updating container ${containerName} (${id})`);
|
||||||
|
|
||||||
|
try {
|
||||||
|
await execa(
|
||||||
|
'/usr/local/emhttp/plugins/dynamix.docker.manager/scripts/update_container',
|
||||||
|
[encodeURIComponent(containerName)],
|
||||||
|
{ shell: 'bash' }
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
this.logger.error(`Failed to update container ${containerName}:`, error);
|
||||||
|
throw new Error(`Failed to update container ${containerName}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.clearContainerCache();
|
||||||
|
this.logger.debug(`Invalidated container caches after updating ${id}`);
|
||||||
|
|
||||||
|
const updatedContainers = await this.getContainers({ skipCache: true });
|
||||||
|
const updatedContainer = updatedContainers.find(
|
||||||
|
(c) => c.names?.some((name) => name.replace(/^\//, '') === containerName) || c.id === id
|
||||||
|
);
|
||||||
|
if (!updatedContainer) {
|
||||||
|
throw new Error(`Container ${id} not found after update`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const appInfo = await this.getAppInfo();
|
||||||
|
await pubsub.publish(PUBSUB_CHANNEL.INFO, appInfo);
|
||||||
|
return updatedContainer;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async updateContainers(ids: string[]): Promise<DockerContainer[]> {
|
||||||
|
const uniqueIds = Array.from(new Set(ids.filter((id) => typeof id === 'string' && id.length)));
|
||||||
|
const updatedContainers: DockerContainer[] = [];
|
||||||
|
for (const id of uniqueIds) {
|
||||||
|
const updated = await this.updateContainer(id);
|
||||||
|
updatedContainers.push(updated);
|
||||||
|
}
|
||||||
|
return updatedContainers;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Updates every container with an available update. Mirrors the legacy webgui "Update All" flow.
|
||||||
|
*/
|
||||||
|
public async updateAllContainers(): Promise<DockerContainer[]> {
|
||||||
|
const containers = await this.getContainers({ skipCache: true });
|
||||||
|
if (!containers.length) {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
const cachedStatuses = await this.dockerManifestService.getCachedUpdateStatuses();
|
||||||
|
const idsWithUpdates: string[] = [];
|
||||||
|
|
||||||
|
for (const container of containers) {
|
||||||
|
if (!container.image) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const hasUpdate = await this.dockerManifestService.isUpdateAvailableCached(
|
||||||
|
container.image,
|
||||||
|
cachedStatuses
|
||||||
|
);
|
||||||
|
if (hasUpdate) {
|
||||||
|
idsWithUpdates.push(container.id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!idsWithUpdates.length) {
|
||||||
|
this.logger.log('Update-all requested but no containers have available updates');
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.log(`Updating ${idsWithUpdates.length} container(s) via updateAllContainers`);
|
||||||
|
return this.updateContainers(idsWithUpdates);
|
||||||
|
}
|
||||||
|
|
||||||
|
private handleDockerListError(error: unknown): never {
|
||||||
|
const message = this.getDockerErrorMessage(error);
|
||||||
|
this.logger.warn(`Docker container query failed: ${message}`);
|
||||||
|
catchHandlers.docker(error as NodeJS.ErrnoException);
|
||||||
|
throw error instanceof Error ? error : new Error('Docker list error');
|
||||||
|
}
|
||||||
|
|
||||||
|
private getDockerErrorMessage(error: unknown): string {
|
||||||
|
if (error instanceof Error && error.message) {
|
||||||
|
return error.message;
|
||||||
|
}
|
||||||
|
if (typeof error === 'string' && error.length) {
|
||||||
|
return error;
|
||||||
|
}
|
||||||
|
return 'Unknown error occurred.';
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
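Both pause() and unpause() above poll getContainers() up to five times at 500 ms intervals before giving up. A minimal sketch of that shared pattern, assuming the DockerContainer and ContainerState types from this diff; the helper itself is hypothetical and not part of the changeset:

// Hypothetical helper (not in this diff): factors out the poll-until-state
// loop that pause() and unpause() each implement inline.
async function waitForContainerState(
    getContainers: () => Promise<DockerContainer[]>,
    id: string,
    targetState: ContainerState,
    attempts = 5,
    delayMs = 500
): Promise<DockerContainer | undefined> {
    let match: DockerContainer | undefined;
    for (let i = 0; i < attempts; i++) {
        await new Promise((resolve) => setTimeout(resolve, delayMs));
        const containers = await getContainers();
        match = containers.find((c) => c.id === id);
        if (match?.state === targetState) {
            break; // reached the desired state early
        }
    }
    return match; // may still be transitional after all attempts; callers decide
}
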
@@ -2,6 +2,7 @@ import { Test } from '@nestjs/testing';
 
 import { beforeEach, describe, expect, it, vi } from 'vitest';
 
+import { DockerTemplateIconService } from '@app/unraid-api/graph/resolvers/docker/docker-template-icon.service.js';
 import {
     ContainerPortType,
     ContainerState,
@@ -38,6 +39,7 @@ describe('containerToResource', () => {
             labels: {
                 'com.docker.compose.service': 'web',
             },
+            isOrphaned: false,
         };
 
         const result = containerToResource(container);
@@ -62,6 +64,7 @@ describe('containerToResource', () => {
             state: ContainerState.EXITED,
             status: 'Exited (0) 1 hour ago',
             autoStart: false,
+            isOrphaned: false,
         };
 
         const result = containerToResource(container);
@@ -83,6 +86,7 @@ describe('containerToResource', () => {
             state: ContainerState.EXITED,
             status: 'Exited (0) 5 minutes ago',
             autoStart: false,
+            isOrphaned: false,
         };
 
         const result = containerToResource(container);
@@ -124,6 +128,7 @@ describe('containerToResource', () => {
                 maintainer: 'dev-team',
                 version: '1.0.0',
             },
+            isOrphaned: false,
         };
 
         const result = containerToResource(container);
@@ -216,6 +221,12 @@ describe('DockerOrganizerService', () => {
                     ]),
                 },
             },
+            {
+                provide: DockerTemplateIconService,
+                useValue: {
+                    getIconsForContainers: vi.fn().mockResolvedValue(new Map()),
+                },
+            },
         ],
     }).compile();
 
@@ -674,16 +685,31 @@ describe('DockerOrganizerService', () => {
        const TO_DELETE = ['entryB', 'entryD'];
        const EXPECTED_REMAINING = ['entryA', 'entryC'];
 
+        // Mock getContainers to return containers matching our test entries
+        const mockContainers = ENTRIES.map((entryId, i) => ({
+            id: `container-${entryId}`,
+            names: [`/${entryId}`],
+            image: 'test:latest',
+            imageId: `sha256:${i}`,
+            command: 'test',
+            created: 1640995200 + i,
+            ports: [],
+            state: 'running',
+            status: 'Up 1 hour',
+            autoStart: true,
+        }));
+        (dockerService.getContainers as any).mockResolvedValue(mockContainers);
+
        const organizerWithOrdering = createTestOrganizer();
        const rootFolder = getRootFolder(organizerWithOrdering);
        rootFolder.children = [...ENTRIES];
 
-        // Create the test entries
+        // Create refs pointing to the container names (which will be /{entryId})
        ENTRIES.forEach((entryId) => {
            organizerWithOrdering.views.default.entries[entryId] = {
                id: entryId,
                type: 'ref',
-                target: `target_${entryId}`,
+                target: `/${entryId}`,
            };
        });
 
@@ -9,10 +9,13 @@ import { DockerOrganizerConfigService } from '@app/unraid-api/graph/resolvers/do
 import {
     addMissingResourcesToView,
     createFolderInView,
+    createFolderWithItems,
     DEFAULT_ORGANIZER_ROOT_ID,
     DEFAULT_ORGANIZER_VIEW_ID,
     deleteOrganizerEntries,
     moveEntriesToFolder,
+    moveItemsToPosition,
+    renameFolder,
     resolveOrganizer,
     setFolderChildrenInView,
 } from '@app/unraid-api/organizer/organizer.js';
@@ -51,8 +54,14 @@ export class DockerOrganizerService {
         private readonly dockerService: DockerService
     ) {}
 
-    async getResources(opts?: ContainerListOptions): Promise<OrganizerV1['resources']> {
-        const containers = await this.dockerService.getContainers(opts);
+    async getResources(
+        opts?: Partial<ContainerListOptions> & { skipCache?: boolean }
+    ): Promise<OrganizerV1['resources']> {
+        const { skipCache = false, ...listOptions } = opts ?? {};
+        const containers = await this.dockerService.getContainers({
+            skipCache,
+            ...(listOptions as any),
+        });
         return containerListToResourcesObject(containers);
     }
 
@@ -74,17 +83,20 @@ export class DockerOrganizerService {
         return newOrganizer;
     }
 
-    async syncAndGetOrganizer(): Promise<OrganizerV1> {
+    async syncAndGetOrganizer(opts?: { skipCache?: boolean }): Promise<OrganizerV1> {
         let organizer = this.dockerConfigService.getConfig();
-        organizer.resources = await this.getResources();
+        organizer.resources = await this.getResources(opts);
         organizer = await this.syncDefaultView(organizer, organizer.resources);
         organizer = await this.dockerConfigService.validate(organizer);
         this.dockerConfigService.replaceConfig(organizer);
         return organizer;
     }
 
-    async resolveOrganizer(organizer?: OrganizerV1): Promise<ResolvedOrganizerV1> {
-        organizer ??= await this.syncAndGetOrganizer();
+    async resolveOrganizer(
+        organizer?: OrganizerV1,
+        opts?: { skipCache?: boolean }
+    ): Promise<ResolvedOrganizerV1> {
+        organizer ??= await this.syncAndGetOrganizer(opts);
         return resolveOrganizer(organizer);
     }
 
@@ -192,7 +204,10 @@ export class DockerOrganizerService {
         const newOrganizer = structuredClone(organizer);
 
         deleteOrganizerEntries(newOrganizer.views.default, entryIds, { mutate: true });
-        addMissingResourcesToView(newOrganizer.resources, newOrganizer.views.default);
+        newOrganizer.views.default = addMissingResourcesToView(
+            newOrganizer.resources,
+            newOrganizer.views.default
+        );
 
         const validated = await this.dockerConfigService.validate(newOrganizer);
         this.dockerConfigService.replaceConfig(validated);
@@ -222,4 +237,119 @@ export class DockerOrganizerService {
         this.dockerConfigService.replaceConfig(validated);
         return validated;
     }
+
+    async moveItemsToPosition(params: {
+        sourceEntryIds: string[];
+        destinationFolderId: string;
+        position: number;
+    }): Promise<OrganizerV1> {
+        const { sourceEntryIds, destinationFolderId, position } = params;
+        const organizer = await this.syncAndGetOrganizer();
+        const newOrganizer = structuredClone(organizer);
+
+        const defaultView = newOrganizer.views.default;
+        if (!defaultView) {
+            throw new AppError('Default view not found');
+        }
+
+        newOrganizer.views.default = moveItemsToPosition({
+            view: defaultView,
+            sourceEntryIds: new Set(sourceEntryIds),
+            destinationFolderId,
+            position,
+            resources: newOrganizer.resources,
+        });
+
+        const validated = await this.dockerConfigService.validate(newOrganizer);
+        this.dockerConfigService.replaceConfig(validated);
+        return validated;
+    }
+
+    async renameFolderById(params: { folderId: string; newName: string }): Promise<OrganizerV1> {
+        const { folderId, newName } = params;
+        const organizer = await this.syncAndGetOrganizer();
+        const newOrganizer = structuredClone(organizer);
+
+        const defaultView = newOrganizer.views.default;
+        if (!defaultView) {
+            throw new AppError('Default view not found');
+        }
+
+        newOrganizer.views.default = renameFolder({
+            view: defaultView,
+            folderId,
+            newName,
+        });
+
+        const validated = await this.dockerConfigService.validate(newOrganizer);
+        this.dockerConfigService.replaceConfig(validated);
+        return validated;
+    }
+
+    async createFolderWithItems(params: {
+        name: string;
+        parentId?: string;
+        sourceEntryIds?: string[];
+        position?: number;
+    }): Promise<OrganizerV1> {
+        const { name, parentId = DEFAULT_ORGANIZER_ROOT_ID, sourceEntryIds = [], position } = params;
+
+        if (name === DEFAULT_ORGANIZER_ROOT_ID) {
+            throw new AppError(`Folder name '${name}' is reserved`);
+        } else if (name === parentId) {
+            throw new AppError(`Folder ID '${name}' cannot be the same as the parent ID`);
+        } else if (!name) {
+            throw new AppError(`Folder name cannot be empty`);
+        }
+
+        const organizer = await this.syncAndGetOrganizer();
+        const defaultView = organizer.views.default;
+        if (!defaultView) {
+            throw new AppError('Default view not found');
+        }
+
+        const parentEntry = defaultView.entries[parentId];
+        if (!parentEntry || parentEntry.type !== 'folder') {
+            throw new AppError(`Parent '${parentId}' not found or is not a folder`);
+        }
+
+        if (parentEntry.children.includes(name)) {
+            return organizer;
+        }
+
+        const newOrganizer = structuredClone(organizer);
+        newOrganizer.views.default = createFolderWithItems({
+            view: defaultView,
+            folderId: name,
+            folderName: name,
+            parentId,
+            sourceEntryIds,
+            position,
+            resources: newOrganizer.resources,
+        });
+
+        const validated = await this.dockerConfigService.validate(newOrganizer);
+        this.dockerConfigService.replaceConfig(validated);
+        return validated;
+    }
+
+    async updateViewPreferences(params: {
+        viewId?: string;
+        prefs: Record<string, unknown>;
+    }): Promise<OrganizerV1> {
+        const { viewId = DEFAULT_ORGANIZER_VIEW_ID, prefs } = params;
+        const organizer = await this.syncAndGetOrganizer();
+        const newOrganizer = structuredClone(organizer);
+
+        const view = newOrganizer.views[viewId];
+        if (!view) {
+            throw new AppError(`View '${viewId}' not found`);
+        }
+
+        view.prefs = prefs;
+
+        const validated = await this.dockerConfigService.validate(newOrganizer);
+        this.dockerConfigService.replaceConfig(validated);
+        return validated;
+    }
 }
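For orientation, a hedged usage sketch of the new organizer mutations; the service instance and entry ids here are illustrative, only the method shapes come from the diff above:

// Illustrative only: assumes an injected DockerOrganizerService instance and
// two existing ref entries. Creates a "media" folder at the root with two
// items, then pins the folder to the first position.
const organizer = await dockerOrganizerService.createFolderWithItems({
    name: 'media',
    sourceEntryIds: ['plex', 'jellyfin'], // hypothetical entry ids
});
await dockerOrganizerService.moveItemsToPosition({
    sourceEntryIds: ['media'],
    destinationFolderId: DEFAULT_ORGANIZER_ROOT_ID,
    position: 0,
});
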
@@ -0,0 +1,12 @@
+import Docker from 'dockerode';
+
+let instance: Docker | undefined;
+
+export function getDockerClient(): Docker {
+    if (!instance) {
+        instance = new Docker({
+            socketPath: '/var/run/docker.sock',
+        });
+    }
+    return instance;
+}
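A brief usage sketch of this shared client. The listContainers call is standard dockerode API; the import path and surrounding code are illustrative:

import { getDockerClient } from './docker-client.js'; // path is illustrative

// Every caller shares one lazily created dockerode instance bound to the
// local Unix socket, instead of constructing a new client per call site.
const docker = getDockerClient();
const containers = await docker.listContainers({ all: true });
console.log(containers.map((c) => c.Names));
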
@@ -24,6 +24,11 @@ export class VmMutations {}
 })
 export class ApiKeyMutations {}
 
+@ObjectType({
+    description: 'Customization related mutations',
+})
+export class CustomizationMutations {}
+
 @ObjectType({
     description: 'Parity check related mutations, WIP, response types and functionaliy will change',
 })
@@ -54,6 +59,9 @@ export class RootMutations {
     @Field(() => ApiKeyMutations, { description: 'API Key related mutations' })
     apiKey: ApiKeyMutations = new ApiKeyMutations();
 
+    @Field(() => CustomizationMutations, { description: 'Customization related mutations' })
+    customization: CustomizationMutations = new CustomizationMutations();
+
     @Field(() => ParityCheckMutations, { description: 'Parity check related mutations' })
     parityCheck: ParityCheckMutations = new ParityCheckMutations();
 
@@ -3,6 +3,7 @@ import { Mutation, Resolver } from '@nestjs/graphql';
 import {
     ApiKeyMutations,
     ArrayMutations,
+    CustomizationMutations,
     DockerMutations,
     ParityCheckMutations,
     RCloneMutations,
@@ -37,6 +38,11 @@ export class RootMutationsResolver {
         return new ApiKeyMutations();
     }
 
+    @Mutation(() => CustomizationMutations, { name: 'customization' })
+    customization(): CustomizationMutations {
+        return new CustomizationMutations();
+    }
+
     @Mutation(() => RCloneMutations, { name: 'rclone' })
     rclone(): RCloneMutations {
         return new RCloneMutations();
@@ -164,4 +164,10 @@ export class Notifications extends Node {
     @Field(() => [Notification])
     @IsNotEmpty()
     list!: Notification[];
+
+    @Field(() => [Notification], {
+        description: 'Deduplicated list of unread warning and alert notifications, sorted latest first.',
+    })
+    @IsNotEmpty()
+    warningsAndAlerts!: Notification[];
 }
@@ -0,0 +1,9 @@
+import { Module } from '@nestjs/common';
+
+import { NotificationsService } from '@app/unraid-api/graph/resolvers/notifications/notifications.service.js';
+
+@Module({
+    providers: [NotificationsService],
+    exports: [NotificationsService],
+})
+export class NotificationsModule {}
@@ -49,6 +49,13 @@ export class NotificationsResolver {
         return await this.notificationsService.getNotifications(filters);
     }
 
+    @ResolveField(() => [Notification], {
+        description: 'Deduplicated list of unread warning and alert notifications.',
+    })
+    public async warningsAndAlerts(): Promise<Notification[]> {
+        return this.notificationsService.getWarningsAndAlerts();
+    }
+
     /**============================================
      * Mutations
      *=============================================**/
@@ -96,6 +103,18 @@ export class NotificationsResolver {
         return this.notificationsService.getOverview();
     }
 
+    @Mutation(() => Notification, {
+        nullable: true,
+        description:
+            'Creates a notification if an equivalent unread notification does not already exist.',
+    })
+    public notifyIfUnique(
+        @Args('input', { type: () => NotificationData })
+        data: NotificationData
+    ): Promise<Notification | null> {
+        return this.notificationsService.notifyIfUnique(data);
+    }
+
     @Mutation(() => NotificationOverview)
     public async archiveAll(
         @Args('importance', { type: () => NotificationImportance, nullable: true })
@@ -163,4 +182,13 @@ export class NotificationsResolver {
     async notificationsOverview() {
         return createSubscription(PUBSUB_CHANNEL.NOTIFICATION_OVERVIEW);
     }
+
+    @Subscription(() => [Notification])
+    @UsePermissions({
+        action: AuthAction.READ_ANY,
+        resource: Resource.NOTIFICATIONS,
+    })
+    async notificationsWarningsAndAlerts() {
+        return createSubscription(PUBSUB_CHANNEL.NOTIFICATION_WARNINGS_AND_ALERTS);
+    }
 }
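A hedged client-side sketch of the new API surface. The field and mutation names follow the resolver above; the exact schema names (especially the input type) may differ, and the transport is left to the reader:

// Illustrative GraphQL documents exercising the new resolver surface.
const WARNINGS_AND_ALERTS_QUERY = /* GraphQL */ `
    query WarningsAndAlerts {
        notifications {
            warningsAndAlerts {
                id
                title
                importance
            }
        }
    }
`;

const NOTIFY_IF_UNIQUE_MUTATION = /* GraphQL */ `
    mutation NotifyIfUnique($input: NotificationData!) {
        notifyIfUnique(input: $input) {
            id
            title
        }
    }
`;
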
@@ -289,6 +289,112 @@ describe.sequential('NotificationsService', () => {
         expect(loaded.length).toEqual(3);
     });
 
+    describe('getWarningsAndAlerts', () => {
+        it('deduplicates unread warning and alert notifications', async ({ expect }) => {
+            const duplicateData = {
+                title: 'Array Status',
+                subject: 'Disk 1 is getting warm',
+                description: 'Disk temperature has exceeded threshold.',
+                importance: NotificationImportance.WARNING,
+            } as const;
+
+            // Create duplicate warnings and an alert with different content
+            await createNotification(duplicateData);
+            await createNotification(duplicateData);
+            await createNotification({
+                title: 'UPS Disconnected',
+                subject: 'The UPS connection has been lost',
+                description: 'Reconnect the UPS to restore protection.',
+                importance: NotificationImportance.ALERT,
+            });
+            await createNotification({
+                title: 'Parity Check Complete',
+                subject: 'A parity check has completed successfully',
+                description: 'No sync errors were detected.',
+                importance: NotificationImportance.INFO,
+            });
+
+            const results = await service.getWarningsAndAlerts();
+            const warningMatches = results.filter(
+                (notification) => notification.subject === duplicateData.subject
+            );
+            const alertMatches = results.filter((notification) =>
+                notification.subject.includes('UPS connection')
+            );
+
+            expect(results.length).toEqual(2);
+            expect(warningMatches).toHaveLength(1);
+            expect(alertMatches).toHaveLength(1);
+            expect(
+                results.every((notification) => notification.importance !== NotificationImportance.INFO)
+            ).toBe(true);
+        });
+
+        it('respects the provided limit', async ({ expect }) => {
+            const limit = 2;
+            await createNotification({
+                title: 'Array Warning',
+                subject: 'Disk 2 is getting warm',
+                description: 'Disk temperature has exceeded threshold.',
+                importance: NotificationImportance.WARNING,
+            });
+            await createNotification({
+                title: 'Network Down',
+                subject: 'Ethernet link is down',
+                description: 'Physical link failure detected.',
+                importance: NotificationImportance.ALERT,
+            });
+            await createNotification({
+                title: 'Critical Temperature',
+                subject: 'CPU temperature exceeded',
+                description: 'CPU temperature has exceeded safe operating limits.',
+                importance: NotificationImportance.ALERT,
+            });
+
+            const results = await service.getWarningsAndAlerts(limit);
+            expect(results.length).toEqual(limit);
+        });
+    });
+
+    describe('notifyIfUnique', () => {
+        const duplicateData: NotificationData = {
+            title: 'Docker Query Failure',
+            subject: 'Failed to fetch containers from Docker',
+            description: 'Please verify that the Docker service is running.',
+            importance: NotificationImportance.ALERT,
+        };
+
+        it('skips creating duplicate unread notifications', async ({ expect }) => {
+            const created = await service.notifyIfUnique(duplicateData);
+            expect(created).toBeDefined();
+
+            const skipped = await service.notifyIfUnique(duplicateData);
+            expect(skipped).toBeNull();
+
+            const notifications = await service.getNotifications({
+                type: NotificationType.UNREAD,
+                limit: 50,
+                offset: 0,
+            });
+            expect(
+                notifications.filter((notification) => notification.title === duplicateData.title)
+            ).toHaveLength(1);
+        });
+
+        it('creates new notification when no duplicate exists', async ({ expect }) => {
+            const uniqueData: NotificationData = {
+                title: 'UPS Disconnected',
+                subject: 'UPS connection lost',
+                description: 'Reconnect the UPS to restore protection.',
+                importance: NotificationImportance.WARNING,
+            };
+
+            const notification = await service.notifyIfUnique(uniqueData);
+            expect(notification).toBeDefined();
+            expect(notification?.title).toEqual(uniqueData.title);
+        });
+    });
+
     /**--------------------------------------------
      * CRUD: Update Tests
      *---------------------------------------------**/
@@ -121,6 +121,7 @@ export class NotificationsService {
             pubsub.publish(PUBSUB_CHANNEL.NOTIFICATION_ADDED, {
                 notificationAdded: notification,
             });
+            void this.publishWarningsAndAlerts();
         }
     }
 
@@ -142,6 +143,20 @@ export class NotificationsService {
         });
     }
 
+    private async publishWarningsAndAlerts() {
+        try {
+            const warningsAndAlerts = await this.getWarningsAndAlerts();
+            await pubsub.publish(PUBSUB_CHANNEL.NOTIFICATION_WARNINGS_AND_ALERTS, {
+                notificationsWarningsAndAlerts: warningsAndAlerts,
+            });
+        } catch (error) {
+            this.logger.error(
+                '[publishWarningsAndAlerts] Failed to broadcast warnings and alerts snapshot',
+                error as Error
+            );
+        }
+    }
+
     private increment(importance: NotificationImportance, collector: NotificationCounts) {
         collector[importance.toLowerCase()] += 1;
         collector['total'] += 1;
@@ -214,6 +229,8 @@ export class NotificationsService {
             await writeFile(path, ini);
         }
 
+        void this.publishWarningsAndAlerts();
+
         return this.notificationFileToGqlNotification({ id, type: NotificationType.UNREAD }, fileData);
     }
 
@@ -300,6 +317,9 @@ export class NotificationsService {
 
         this.decrement(notification.importance, NotificationsService.overview[type.toLowerCase()]);
         await this.publishOverview();
+        if (type === NotificationType.UNREAD) {
+            void this.publishWarningsAndAlerts();
+        }
 
         // return both the overview & the deleted notification
         // this helps us reference the deleted notification in-memory if we want
@@ -320,6 +340,10 @@ export class NotificationsService {
             warning: 0,
             total: 0,
         };
+        await this.publishOverview();
+        if (type === NotificationType.UNREAD) {
+            void this.publishWarningsAndAlerts();
+        }
         return this.getOverview();
     }
 
@@ -433,6 +457,8 @@ export class NotificationsService {
         });
         await moveToArchive(notification);
 
+        void this.publishWarningsAndAlerts();
+
         return {
             ...notification,
             type: NotificationType.ARCHIVE,
@@ -458,6 +484,7 @@ export class NotificationsService {
         });
 
         await moveToUnread(notification);
+        void this.publishWarningsAndAlerts();
         return {
             ...notification,
             type: NotificationType.UNREAD,
@@ -482,6 +509,7 @@ export class NotificationsService {
         });
 
         const stats = await batchProcess(notifications, archive);
+        void this.publishWarningsAndAlerts();
         return { ...stats, overview: overviewSnapshot };
     }
 
@@ -504,6 +532,7 @@ export class NotificationsService {
         });
 
         const stats = await batchProcess(notifications, unArchive);
+        void this.publishWarningsAndAlerts();
         return { ...stats, overview: overviewSnapshot };
     }
 
@@ -567,6 +596,64 @@ export class NotificationsService {
         return notifications;
     }
 
+    /**
+     * Creates a notification only if an equivalent unread notification does not already exist.
+     *
+     * @param data The notification data to create.
+     * @returns The created notification, or null if a duplicate was detected.
+     */
+    public async notifyIfUnique(data: NotificationData): Promise<Notification | null> {
+        const fingerprint = this.getNotificationFingerprintFromData(data);
+        const hasDuplicate = await this.hasUnreadNotificationWithFingerprint(fingerprint);
+
+        if (hasDuplicate) {
+            this.logger.verbose(
+                `[notifyIfUnique] Skipping notification creation for duplicate fingerprint: ${fingerprint}`
+            );
+            return null;
+        }
+
+        return this.createNotification(data);
+    }
+
+    /**
+     * Returns a deduplicated list of unread warning and alert notifications.
+     *
+     * Deduplication is based on the combination of importance, title, subject, description, and link.
+     * This ensures repeated notifications with the same user-facing content are only shown once, while
+     * still prioritizing the most recent occurrence of each unique notification.
+     *
+     * @param limit Maximum number of unique notifications to return. Default: 50.
+     */
+    public async getWarningsAndAlerts(limit = 50): Promise<Notification[]> {
+        const notifications = await this.loadUnreadNotifications();
+        const deduped: Notification[] = [];
+        const seen = new Set<string>();
+
+        for (const notification of notifications) {
+            if (
+                notification.importance !== NotificationImportance.ALERT &&
+                notification.importance !== NotificationImportance.WARNING
+            ) {
+                continue;
+            }
+
+            const key = this.getDeduplicationKey(notification);
+            if (seen.has(key)) {
+                continue;
+            }
+
+            seen.add(key);
+            deduped.push(notification);
+
+            if (deduped.length >= limit) {
+                break;
+            }
+        }
+
+        return deduped;
+    }
+
     /**
      * Given a path to a folder, returns the full (absolute) paths of the folder's top-level contents.
      * Sorted latest-first by default.
@@ -787,8 +874,57 @@ export class NotificationsService {
      * Helpers
      *------------------------------------------------------------------------**/
 
+    private async loadUnreadNotifications(): Promise<Notification[]> {
+        const { UNREAD } = this.paths();
+        const files = await this.listFilesInFolder(UNREAD);
+        const [notifications] = await this.loadNotificationsFromPaths(files, {
+            type: NotificationType.UNREAD,
+        });
+        return notifications;
+    }
+
+    private async hasUnreadNotificationWithFingerprint(fingerprint: string): Promise<boolean> {
+        const notifications = await this.loadUnreadNotifications();
+        return notifications.some(
+            (notification) => this.getDeduplicationKey(notification) === fingerprint
+        );
+    }
+
     private sortLatestFirst(a: Notification, b: Notification) {
         const defaultTimestamp = 0;
         return Number(b.timestamp ?? defaultTimestamp) - Number(a.timestamp ?? defaultTimestamp);
     }
+
+    private getDeduplicationKey(notification: Notification): string {
+        return this.getNotificationFingerprint(notification);
+    }
+
+    private getNotificationFingerprintFromData(data: NotificationData): string {
+        return this.getNotificationFingerprint({
+            importance: data.importance,
+            title: data.title,
+            subject: data.subject,
+            description: data.description,
+            link: data.link,
+        });
+    }
+
+    private getNotificationFingerprint({
+        importance,
+        title,
+        subject,
+        description,
+        link,
+    }: Pick<Notification, 'importance' | 'title' | 'subject' | 'description'> & {
+        link?: string | null;
+    }): string {
+        const makePart = (value?: string | null) => (value ?? '').trim();
+        return [
+            importance,
+            makePart(title),
+            makePart(subject),
+            makePart(description),
+            makePart(link),
+        ].join('|');
+    }
 }
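To make the dedup key concrete, a worked example of the fingerprint format; the values are illustrative and this assumes the importance enum serializes to the string 'ALERT':

// getNotificationFingerprint joins the trimmed parts with '|':
//   importance:  'ALERT'                                       (assumed enum value)
//   title:       'Docker Query Failure'
//   subject:     'Failed to fetch containers from Docker'
//   description: 'Please verify that the Docker service is running.'
//   link:        undefined -> ''
// which yields:
// 'ALERT|Docker Query Failure|Failed to fetch containers from Docker|Please verify that the Docker service is running.|'
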
@@ -15,8 +15,8 @@ import { InfoModule } from '@app/unraid-api/graph/resolvers/info/info.module.js'
 import { LogsModule } from '@app/unraid-api/graph/resolvers/logs/logs.module.js';
 import { MetricsModule } from '@app/unraid-api/graph/resolvers/metrics/metrics.module.js';
 import { RootMutationsResolver } from '@app/unraid-api/graph/resolvers/mutation/mutation.resolver.js';
+import { NotificationsModule } from '@app/unraid-api/graph/resolvers/notifications/notifications.module.js';
 import { NotificationsResolver } from '@app/unraid-api/graph/resolvers/notifications/notifications.resolver.js';
-import { NotificationsService } from '@app/unraid-api/graph/resolvers/notifications/notifications.service.js';
 import { OnlineResolver } from '@app/unraid-api/graph/resolvers/online/online.resolver.js';
 import { OwnerResolver } from '@app/unraid-api/graph/resolvers/owner/owner.resolver.js';
 import { RCloneModule } from '@app/unraid-api/graph/resolvers/rclone/rclone.module.js';
@@ -47,6 +47,7 @@ import { MeResolver } from '@app/unraid-api/graph/user/user.resolver.js';
         FlashBackupModule,
         InfoModule,
         LogsModule,
+        NotificationsModule,
         RCloneModule,
         SettingsModule,
         SsoModule,
@@ -58,7 +59,6 @@ import { MeResolver } from '@app/unraid-api/graph/user/user.resolver.js';
         FlashResolver,
         MeResolver,
         NotificationsResolver,
-        NotificationsService,
         OnlineResolver,
         OwnerResolver,
         RegistrationResolver,
@@ -1,4 +1,5 @@
 import { forwardRef, Module } from '@nestjs/common';
+import { ConfigModule } from '@nestjs/config';
 
 import { UserSettingsModule } from '@unraid/shared/services/user-settings.js';
 
@@ -7,7 +8,7 @@ import { OidcConfigPersistence } from '@app/unraid-api/graph/resolvers/sso/core/
 import { OidcValidationService } from '@app/unraid-api/graph/resolvers/sso/core/oidc-validation.service.js';
 
 @Module({
-    imports: [UserSettingsModule, forwardRef(() => OidcClientModule)],
+    imports: [ConfigModule, UserSettingsModule, forwardRef(() => OidcClientModule)],
     providers: [OidcConfigPersistence, OidcValidationService],
     exports: [OidcConfigPersistence, OidcValidationService],
 })
@@ -22,7 +22,7 @@ describe('UPSResolver', () => {
             MODEL: 'Test UPS',
             STATUS: 'Online',
             BCHARGE: '100',
-            TIMELEFT: '3600',
+            TIMELEFT: '60', // 60 minutes (apcupsd format)
             LINEV: '120.5',
             OUTPUTV: '120.5',
             LOADPCT: '25',
@@ -21,7 +21,8 @@ export class UPSResolver {
             status: upsData.STATUS || 'Online',
             battery: {
                 chargeLevel: parseInt(upsData.BCHARGE || '100', 10),
-                estimatedRuntime: parseInt(upsData.TIMELEFT || '3600', 10),
+                // Convert TIMELEFT from minutes (apcupsd format) to seconds
+                estimatedRuntime: Math.round(parseFloat(upsData.TIMELEFT || '60') * 60),
                 health: 'Good',
             },
             power: {
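The unit fix above is easiest to sanity-check with a worked value (the sample reading is illustrative):

// apcupsd reports TIMELEFT in minutes, e.g. '27.5'.
// Old code treated it as seconds: parseInt('27.5', 10) -> 27 (off by a factor of 60).
// New code converts minutes to seconds:
Math.round(parseFloat('27.5') * 60); // 1650 seconds = 27.5 minutes
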
Some files were not shown because too many files have changed in this diff.