Mirror of https://github.com/unraid/api.git (synced 2026-01-02 14:40:01 -06:00)
Compare commits: feat/dev-h...feat/notif (336 commits)
| Author | SHA1 | Date |
|---|---|---|
| | d0db8a098d | |
| | 94dfe85716 | |
| | eeb3289ae8 | |
| | 939d7a304d | |
| | acccb3694c | |
| | 2724485989 | |
| | 2f4ff21986 | |
| | 83e00c640a | |
| | abcaa5aedb | |
| | 4c663dc69c | |
| | 89eb841b20 | |
| | 7296195495 | |
| | 696b55de6c | |
| | aa5fad39f3 | |
| | 9c38fa6a9c | |
| | da5d1132d1 | |
| | 001be86181 | |
| | ecfc797e7d | |
| | dffbfc2dab | |
| | e5f029830b | |
| | 1a33e6343a | |
| | 69441d890e | |
| | 46c82ecae3 | |
| | 0b469f5b3f | |
| | 3fc41480a2 | |
| | e27776df3d | |
| | abd8e09908 | |
| | 504283f227 | |
| | ff7e09e15c | |
| | deb42f6a81 | |
| | 95d018ea05 | |
| | 106b2e42c0 | |
| | 1c5ff58d2d | |
| | d7bab9f443 | |
| | 902c76c759 | |
| | 5e50f24d70 | |
| | 4f0210d16a | |
| | ddb8772692 | |
| | 787f8b9bf5 | |
| | 61ba324ca0 | |
| | a230a33df5 | |
| | 84b234c9cf | |
| | 9bfc04c2a5 | |
| | e84430471d | |
| | 2d60045784 | |
| | e9137f2553 | |
| | dbe0dd5dfb | |
| | 9d2796f2c9 | |
| | 972a19be04 | |
| | c8da8fe314 | |
| | 353132b67a | |
| | 88b7cbfe95 | |
| | 3ed1d10c98 | |
| | 62693cfcc0 | |
| | 810708f775 | |
| | 08f6d6df65 | |
| | da673c3f2b | |
| | cb463bfdd0 | |
| | 7177171b75 | |
| | 9f0ab7fa38 | |
| | a32374a3ac | |
| | cb6534d9d9 | |
| | 2eaf175515 | |
| | 50376a0d66 | |
| | 4b2007b689 | |
| | 72fcaca4f3 | |
| | 2f48ddf942 | |
| | 62dfa6c83a | |
| | 27bb375460 | |
| | cc4d5bdefb | |
| | f55302c130 | |
| | b8dbe3f9d9 | |
| | 20771f61a8 | |
| | b9b8bbe871 | |
| | b8e61007e3 | |
| | 49536032df | |
| | 9229cf3df6 | |
| | 58665a4e98 | |
| | 885d1537b6 | |
| | 198cfe5015 | |
| | 42189dd451 | |
| | 6122b3c001 | |
| | cda7368d3d | |
| | 447cecd19d | |
| | 7321bd0088 | |
| | 67e898efe1 | |
| | 41e5de83a2 | |
| | 5c020a62d6 | |
| | 1393e967fa | |
| | f07c14354f | |
| | d42a426244 | |
| | 125bc29166 | |
| | a6333bf5a2 | |
| | e8e985ad6a | |
| | 1a598885cc | |
| | d73f267245 | |
| | 7c1873249e | |
| | 09f33a0127 | |
| | db00d7442d | |
| | 724159314c | |
| | 180f115b71 | |
| | eb38eb219e | |
| | 3da701a53b | |
| | 6e5b2f1f67 | |
| | 812053d7a4 | |
| | a929c7e3b3 | |
| | c0179c8351 | |
| | d5c7be54b0 | |
| | 32478f34c2 | |
| | 4daa09b340 | |
| | 346ce91f73 | |
| | cee3a6d0ef | |
| | e90f606f43 | |
| | 05fa344454 | |
| | 406c400bd2 | |
| | 1ae466899e | |
| | 5178e131ce | |
| | 0bd11bce5a | |
| | fddde33977 | |
| | 1f5df845eb | |
| | ef54af655e | |
| | bb44862b7b | |
| | 9709dc82ea | |
| | 38f0699e19 | |
| | 6ca9f421eb | |
| | 935825571b | |
| | 9beaa78820 | |
| | 420c2c1afd | |
| | 7c0cb07b83 | |
| | c6a7137f19 | |
| | 44f9ba0e7f | |
| | 1c61e64169 | |
| | cf0eeebd31 | |
| | f118597e47 | |
| | 6f2fcffd3e | |
| | 8f7748404c | |
| | 88c2605d4f | |
| | c2d645612a | |
| | b20f69c208 | |
| | b9cedb70ff | |
| | a11978aa33 | |
| | b0efcc0d51 | |
| | 92b5f2226e | |
| | 98f2603525 | |
| | cfb1d50c8e | |
| | 545ccf1938 | |
| | 0c79995107 | |
| | 9d3397a687 | |
| | 11c160835a | |
| | e388b37aa6 | |
| | 1da882b807 | |
| | d9d5a24b70 | |
| | 24e3cad882 | |
| | 323a4a17cf | |
| | 9968e0f7df | |
| | 2ccc53630b | |
| | d7bb3defc3 | |
| | ddb8bf8a5c | |
| | 6234d61ae5 | |
| | a665ee3ec6 | |
| | 7ca3efe8b8 | |
| | 28f4952599 | |
| | 7e4022518d | |
| | 4d1656eaa8 | |
| | 5b2421cb0c | |
| | 0578b066f1 | |
| | 57fdcf3e60 | |
| | eb7bdb6a85 | |
| | ebd671e7b6 | |
| | 15a1a3ac15 | |
| | 9a0c7fe9c8 | |
| | 91bcbc3d6f | |
| | b3d046f4ea | |
| | 0f13e34562 | |
| | e18cd87180 | |
| | 421949a9f8 | |
| | 8c7c580f3f | |
| | c616641044 | |
| | fd16243287 | |
| | 7352bbe77a | |
| | 4d33908e01 | |
| | adabe92f72 | |
| | 958f9e57e1 | |
| | ac5032df83 | |
| | 5f4cc07473 | |
| | 38524bce88 | |
| | 64db2f19a7 | |
| | 8fe1e80bbd | |
| | 1c4506cf50 | |
| | 84fe7f6df6 | |
| | 5c7e650b3b | |
| | 6cac078e15 | |
| | 4e555021a7 | |
| | b1e2f043b1 | |
| | bc69852333 | |
| | 2c79ccc883 | |
| | c240fab58a | |
| | 3c50022ac3 | |
| | 9201136cb1 | |
| | ff52f75abf | |
| | eed40f7875 | |
| | 754d4560ea | |
| | f6d09f4ba2 | |
| | a1f0dac42d | |
| | fff935cf02 | |
| | 0849468fc2 | |
| | 6a57924fbf | |
| | 57802c2ea0 | |
| | 924df0dc9e | |
| | d04001e052 | |
| | 92ec931aff | |
| | 30f92374d0 | |
| | 6bfd221cd1 | |
| | ceb537ae91 | |
| | 81b197a9aa | |
| | 54b4ad0df8 | |
| | e84c3ebe14 | |
| | 81acf1d947 | |
| | 80bfc231e0 | |
| | b1409684db | |
| | 14d9448e4c | |
| | 924fa699eb | |
| | 999a8e39eb | |
| | 5a1c85d739 | |
| | ba77ff4a4c | |
| | 05765495c4 | |
| | f7cccc8c37 | |
| | 85e0f7993e | |
| | d5a424ebe1 | |
| | 01441961c3 | |
| | 836f64d28f | |
| | 79bb4e585b | |
| | 409e88b727 | |
| | 5034a8981a | |
| | e61d9f195d | |
| | b3e213ba04 | |
| | a7ea678683 | |
| | 791e16ce52 | |
| | 173da0e65b | |
| | 287aabfda7 | |
| | d8656cc6b3 | |
| | a3500c9bc9 | |
| | b513cbe614 | |
| | b5c525a9c2 | |
| | 648b560148 | |
| | 6eb34c3501 | |
| | 21544bd2dc | |
| | 3e115f84d7 | |
| | ba586fc438 | |
| | e6cbed14a9 | |
| | f531e68b87 | |
| | 53f718e240 | |
| | de36bfab99 | |
| | 1e2f57a4cd | |
| | 46aa3a3e24 | |
| | 0c627d1ade | |
| | f20349fb2a | |
| | dc72d63481 | |
| | e9efed8067 | |
| | 71ce064008 | |
| | b67b0ea633 | |
| | bf3d46d190 | |
| | a1fa3462eb | |
| | c84175e763 | |
| | 0f9fe18379 | |
| | 76c0d35783 | |
| | 3ece0d1acc | |
| | 0473c9b676 | |
| | 1956227f63 | |
| | c515d08d5c | |
| | 0bd9820c00 | |
| | 0c2299cfcd | |
| | 12fdfac467 | |
| | 3fc20ec593 | |
| | 69a6163e29 | |
| | 00294699f0 | |
| | 90ff980a00 | |
| | 17e7d2a2de | |
| | d2a88df5bf | |
| | 9471f5c918 | |
| | 492d45f363 | |
| | 2951d68f9d | |
| | 4857bc0478 | |
| | c794a1d1a1 | |
| | d2a34acfb9 | |
| | 3dc60b6106 | |
| | 57587b9175 | |
| | 5ee7cb2647 | |
| | 911a3f8f1a | |
| | d426001372 | |
| | 2d0c65aaf4 | |
| | fd4605b956 | |
| | 3f84b6bbfd | |
| | 5ad10af303 | |
| | 9aa11faaaa | |
| | bfa98574f1 | |
| | dd2dc40ff1 | |
| | 8a3265d7b1 | |
| | a240a031a8 | |
| | 979e41fe41 | |
| | 03dc404aa7 | |
| | 364320ffc9 | |
| | 2492f4cec9 | |
| | 1a643b3eef | |
| | 58ee3b958b | |
| | 2928cf5821 | |
| | b21b276151 | |
| | d80f25dc96 | |
| | f5f5a081e6 | |
| | f60474b4d7 | |
| | 364373df0c | |
| | bb38533bb2 | |
| | 836801c524 | |
| | b54cf5ede9 | |
| | 1a20c66c02 | |
| | 587bbb3b4d | |
| | e95f7a1a03 | |
| | 9c75f6e2ca | |
| | 822042ab9c | |
| | 3a843b6e16 | |
| | 6072387c37 | |
| | 313162dbf2 | |
| | 495515abac | |
| | 09087040e9 | |
| | 4423829911 | |
| | c8f469c4fb | |
| | bc61b45f9f | |
| | f530d9ea82 | |
| | 2046fa5310 | |
| | 9ea2327fa0 | |
| | ff67b54a1b | |
| | e6bd7a54be | |
| | 5827b5ffa3 | |
| | 572a1310e0 | |
| | c1403d3826 | |
| | 29afe9b9e8 | |
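The same commit range can be inspected locally from a clone of the mirror. A minimal sketch, assuming the two branch names shown in the compare header above exist on the remote as written (the compare view may truncate them):

```sh
# Clone the mirror and list the commits that feat/notif carries on top of
# feat/dev-h, which is roughly what the compare view above enumerates.
git clone https://github.com/unraid/api.git
cd api
git fetch origin feat/dev-h feat/notif
git log --oneline origin/feat/dev-h..origin/feat/notif
```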
.github/workflows/lint-test-build-web.yml (vendored, 74 changed lines)
@@ -1,74 +0,0 @@
name: Lint, Test, and Build Web Components

on:
  workflow_dispatch:

jobs:
  lint-web:
    defaults:
      run:
        working-directory: web
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4

      - name: Create env file
        run: |
          touch .env
          echo VITE_ACCOUNT=${{ vars.VITE_ACCOUNT }} >> .env
          echo VITE_CONNECT=${{ vars.VITE_CONNECT }} >> .env
          echo VITE_UNRAID_NET=${{ vars.VITE_UNRAID_NET }} >> .env
          echo VITE_CALLBACK_KEY=${{ vars.VITE_CALLBACK_KEY }} >> .env
          cat .env

      - name: Install node
        uses: actions/setup-node@v4
        with:
          cache: "npm"
          cache-dependency-path: "web/package-lock.json"
          node-version-file: "web/.nvmrc"

      - name: Installing node deps
        run: npm install

      - name: Lint files
        run: npm run lint

  build-web:
    defaults:
      run:
        working-directory: web
    runs-on: ubuntu-latest
    needs: [lint-web]
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4

      - name: Create env file
        run: |
          touch .env
          echo VITE_ACCOUNT=${{ vars.VITE_ACCOUNT }} >> .env
          echo VITE_CONNECT=${{ vars.VITE_CONNECT }} >> .env
          echo VITE_UNRAID_NET=${{ vars.VITE_UNRAID_NET }} >> .env
          echo VITE_CALLBACK_KEY=${{ vars.VITE_CALLBACK_KEY }} >> .env
          cat .env

      - name: Install node
        uses: actions/setup-node@v4
        with:
          cache: "npm"
          cache-dependency-path: "web/package-lock.json"
          node-version-file: "web/.nvmrc"

      - name: Installing node deps
        run: npm install

      - name: Build
        run: npm run build

      - name: Upload build to Github artifacts
        uses: actions/upload-artifact@v4
        with:
          name: unraid-web
          path: web/.nuxt/nuxt-custom-elements/dist/unraid-components
.github/workflows/main.yml (vendored, 325 changed lines)
@@ -1,6 +1,7 @@
|
||||
name: CI - Main (API)
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
@@ -12,6 +13,18 @@ concurrency:
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
release-please:
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- id: release
|
||||
uses: googleapis/release-please-action@v4
|
||||
outputs:
|
||||
releases_created: ${{ steps.release.outputs.releases_created }}
|
||||
tag_name: ${{ steps.release.outputs.tag_name }}
|
||||
start:
|
||||
# This prevents a tag running twice as it'll have a "tag" and a "commit" event
|
||||
# We only want the tag to run the action as it'll be able to create the release notes
|
||||
@@ -21,153 +34,66 @@ jobs:
|
||||
- name: Validate branch and tag
|
||||
run: exit 0
|
||||
|
||||
lint-api:
|
||||
continue-on-error: true
|
||||
build-test-api:
|
||||
name: Build and Test API
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: api
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Reconfigure git to use HTTP authentication
|
||||
run: >
|
||||
git config --global url."https://github.com/".insteadOf
|
||||
ssh://git@github.com/
|
||||
|
||||
- name: Install node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: "api/.nvmrc"
|
||||
|
||||
# - name: Get npm cache directory
|
||||
# id: npm-cache
|
||||
# run: echo "::set-output name=dir::$(npm config get cache)"
|
||||
|
||||
# - name: Load npm cache
|
||||
# uses: actions/cache@v3
|
||||
# with:
|
||||
# path: ${{ steps.npm-cache.outputs.dir }}
|
||||
# key: ${{ runner.os }}-npm-cache-${{ hashFiles('**/package-lock.json') }}
|
||||
|
||||
- name: Install libvirt-dev
|
||||
run: sudo apt-get update && sudo apt-get install libvirt-dev
|
||||
|
||||
- name: Installing node deps
|
||||
run: npm install
|
||||
|
||||
- name: Lint files
|
||||
run: npm run lint
|
||||
|
||||
test-api:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: api
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Reconfigure git to use HTTP authentication
|
||||
run: >
|
||||
git config --global url."https://github.com/".insteadOf
|
||||
ssh://git@github.com/
|
||||
|
||||
- name: Build Docker Compose
|
||||
run: |
|
||||
docker network create mothership_default
|
||||
GIT_SHA=$(git rev-parse --short HEAD) IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '') docker-compose build builder
|
||||
|
||||
- name: Run Docker Compose
|
||||
run: GIT_SHA=$(git rev-parse --short HEAD) IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '') docker-compose run builder npm run coverage
|
||||
|
||||
lint-web:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: web
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Create env file
|
||||
run: |
|
||||
touch .env
|
||||
echo VITE_ACCOUNT=${{ vars.VITE_ACCOUNT }} >> .env
|
||||
echo VITE_CONNECT=${{ vars.VITE_CONNECT }} >> .env
|
||||
echo VITE_UNRAID_NET=${{ vars.VITE_UNRAID_NET }} >> .env
|
||||
echo VITE_CALLBACK_KEY=${{ vars.VITE_CALLBACK_KEY }} >> .env
|
||||
cat .env
|
||||
|
||||
- name: Install node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
cache: "npm"
|
||||
cache-dependency-path: "web/package-lock.json"
|
||||
node-version-file: "web/.nvmrc"
|
||||
|
||||
- name: Installing node deps
|
||||
run: npm install
|
||||
|
||||
- name: Lint files
|
||||
run: npm run lint
|
||||
|
||||
build-api:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: api
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
outputs:
|
||||
API_VERSION: ${{ steps.build-pack-binary.outputs.API_VERSION }}
|
||||
API_VERSION: ${{ steps.vars.outputs.API_VERSION }}
|
||||
API_MD5: ${{ steps.set-hashes.outputs.API_MD5 }}
|
||||
API_SHA256: ${{ steps.set-hashes.outputs.API_SHA256 }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Add SSH deploy key
|
||||
uses: shimataro/ssh-key-action@v2
|
||||
- name: Build with Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
key: ${{ secrets.UNRAID_BOT_SSH_KEY }}
|
||||
known_hosts: ${{ secrets.KNOWN_HOSTS }}
|
||||
|
||||
- name: Install node
|
||||
uses: actions/setup-node@v4
|
||||
install: true
|
||||
platforms: linux/amd64
|
||||
- name: Build Builder
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
node-version-file: "api/.nvmrc"
|
||||
context: ./api
|
||||
push: false
|
||||
tags: builder:latest
|
||||
cache-from: type=gha,ref=builder:latest
|
||||
cache-to: type=gha,mode=max,ref=builder:latest
|
||||
load: true
|
||||
- name: Lint inside of the docker container
|
||||
continue-on-error: true
|
||||
run: |
|
||||
docker run --rm builder npm run lint
|
||||
|
||||
- name: Install libvirt-dev
|
||||
run: sudo apt-get update && sudo apt-get install libvirt-dev
|
||||
|
||||
- name: Installing node deps
|
||||
run: npm install
|
||||
|
||||
- name: Install pkg and node-prune
|
||||
run: npm i -g pkg && curl -sf https://gobinaries.com/tj/node-prune | sh
|
||||
|
||||
# See https://github.com/apollographql/subscriptions-transport-ws/issues/433
|
||||
- name: Patch subscriptions-transport-ws
|
||||
run: npm run patch:subscriptions-transport-ws
|
||||
|
||||
|
||||
- name: Build and Pack
|
||||
- name: Test inside of the docker container
|
||||
run: |
|
||||
git fetch --depth=2 origin main
|
||||
if git diff --name-only --relative=api origin/main HEAD | grep -q '.'; then
|
||||
docker run --rm builder npm run coverage
|
||||
else
|
||||
echo "No changes in /api folder, skipping coverage."
|
||||
fi
|
||||
- name: Get Git Short Sha and API version
|
||||
id: vars
|
||||
run: |
|
||||
GIT_SHA=$(git rev-parse --short HEAD)
|
||||
IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
|
||||
PACKAGE_LOCK_VERSION=$(jq -r '.version' package-lock.json)
|
||||
echo "GIT_SHA=$GIT_SHA" >> $GITHUB_OUTPUT
|
||||
echo "IS_TAGGED=$IS_TAGGED" >> $GITHUB_OUTPUT
|
||||
echo "PACKAGE_LOCK_VERSION=$PACKAGE_LOCK_VERSION" >> $GITHUB_OUTPUT
|
||||
echo "API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")" >> $GITHUB_OUTPUT
|
||||
- name: Build inside of the docker container
|
||||
id: build-pack-binary
|
||||
run: WORKDIR=${{ github.workspace }} && npm run build-pkg
|
||||
run: |
|
||||
docker run --rm -v ${{ github.workspace }}/api/deploy/release:/app/deploy/release -e API_VERSION=${{ steps.vars.outputs.API_VERSION }} builder npm run build-and-pack
|
||||
|
||||
- name: Set Hashes
|
||||
id: set-hashes
|
||||
run: |
|
||||
API_MD5=$(md5sum ${{ github.workspace }}/api/deploy/release/*.tgz | awk '{ print $1 }')
|
||||
API_SHA256=$(sha256sum ${{ github.workspace }}/api/deploy/release/*.tgz | awk '{ print $1 }')
|
||||
echo "::set-output name=API_MD5::${API_MD5}"
|
||||
echo "::set-output name=API_SHA256::${API_SHA256}"
|
||||
echo "API_MD5=$(md5sum ${{ github.workspace }}/api/deploy/release/*.tgz | awk '{ print $1 }')" >> $GITHUB_OUTPUT
|
||||
echo "API_SHA256=$(sha256sum ${{ github.workspace }}/api/deploy/release/*.tgz | awk '{ print $1 }')" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Upload tgz to Github artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
@@ -176,13 +102,13 @@ jobs:
|
||||
path: ${{ github.workspace }}/api/deploy/release/*.tgz
|
||||
|
||||
build-web:
|
||||
name: Build Web App
|
||||
environment:
|
||||
name: production
|
||||
defaults:
|
||||
run:
|
||||
working-directory: web
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: production
|
||||
needs: [lint-web]
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
@@ -206,6 +132,13 @@ jobs:
|
||||
- name: Installing node deps
|
||||
run: npm install
|
||||
|
||||
- name: Lint files
|
||||
continue-on-error: true
|
||||
run: npm run lint
|
||||
|
||||
- name: Test
|
||||
run: npm run test:ci
|
||||
|
||||
- name: Build
|
||||
run: npm run build
|
||||
|
||||
@@ -216,7 +149,7 @@ jobs:
|
||||
path: web/.nuxt/nuxt-custom-elements/dist/unraid-components
|
||||
|
||||
build-plugin:
|
||||
needs: [lint-api, lint-web, test-api, build-api, build-web]
|
||||
needs: [build-test-api, build-web]
|
||||
defaults:
|
||||
run:
|
||||
working-directory: plugin
|
||||
@@ -233,13 +166,58 @@ jobs:
|
||||
with:
|
||||
name: unraid-web
|
||||
path: ./plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components
|
||||
- name: Download Node.js From Slackbuilds (skipped due to node.js issues)
|
||||
if: false
|
||||
id: download-nodejs
|
||||
run: |
|
||||
# Get latest node version (based on main_node_version) from slackware
|
||||
main_node_version=$(sed 's/^v//' ../api/.nvmrc)
|
||||
base_node_url="https://mirrors.slackware.com/slackware/slackware64-current/slackware64/l/"
|
||||
latest_nodejs=$(wget -q -O- "${base_node_url}" | grep -o "nodejs-${main_node_version}\.[0-9.]*-x86_64-[0-9]*\.txz" | sort -V | tail -n 1)
|
||||
if [[ -z "${latest_nodejs}" ]]; then
|
||||
echo "Error: Failed to fetch the latest nodejs version."
|
||||
exit 1
|
||||
fi
|
||||
node_download_url="${base_node_url}${latest_nodejs}"
|
||||
if ! wget -q "${node_download_url}" -O "${{ github.workspace }}/plugin/archive/${latest_nodejs}"; then
|
||||
echo "Error: Failed to download nodejs package."
|
||||
exit 1
|
||||
fi
|
||||
node_sha256=$(sha256sum "${{ github.workspace }}/plugin/archive/${latest_nodejs}" | cut -f 1 -d ' ')
|
||||
echo "NODEJS_FILENAME=${latest_nodejs}" >> $GITHUB_OUTPUT
|
||||
echo "NODEJS_SHA256=${node_sha256}" >> $GITHUB_OUTPUT
|
||||
- name: Download nghttp3
|
||||
id: download-nghttp3
|
||||
run: |
|
||||
# Get latest nghttp3 version
|
||||
base_nghttp3_url="https://mirrors.slackware.com/slackware/slackware64-current/slackware64/n/"
|
||||
latest_nghttp3=$(wget -q -O- "${base_nghttp3_url}" | grep -o "nghttp3-[0-9.]*-x86_64-[0-9]*\.txz" | sort -V | tail -n 1)
|
||||
nghttp3_download_url="${base_nghttp3_url}${latest_nghttp3}"
|
||||
if ! wget -q "${nghttp3_download_url}" -O "${{ github.workspace }}/plugin/archive/${latest_nghttp3}"; then
|
||||
echo "Error: Failed to download nghttp3 package."
|
||||
exit 1
|
||||
fi
|
||||
nghttp3_sha256=$(sha256sum "${{ github.workspace }}/plugin/archive/${latest_nghttp3}" | cut -f 1 -d ' ')
|
||||
echo "NGHTTP3_FILENAME=${latest_nghttp3}" >> $GITHUB_OUTPUT
|
||||
echo "NGHTTP3_SHA256=${nghttp3_sha256}" >> $GITHUB_OUTPUT
|
||||
- name: Build Plugin
|
||||
run: |
|
||||
cd source/dynamix.unraid.net
|
||||
export API_VERSION=${{needs.build-api.outputs.API_VERSION}}
|
||||
export API_MD5=${{needs.build-api.outputs.API_MD5}}
|
||||
export API_SHA256=${{needs.build-api.outputs.API_SHA256}}
|
||||
bash ./pkg_build.sh s
|
||||
export API_VERSION=${{needs.build-test-api.outputs.API_VERSION}}
|
||||
export API_MD5=${{needs.build-test-api.outputs.API_MD5}}
|
||||
export API_SHA256=${{needs.build-test-api.outputs.API_SHA256}}
|
||||
export NGHTTP3_FILENAME=${{ steps.download-nghttp3.outputs.NGHTTP3_FILENAME }}
|
||||
export NGHTTP3_SHA256=${{ steps.download-nghttp3.outputs.NGHTTP3_SHA256 }}
|
||||
if [ -z "${API_VERSION}" ] ||
|
||||
[ -z "${API_MD5}" ] ||
|
||||
[ -z "${API_SHA256}" ] ||
|
||||
[ -z "${NGHTTP3_FILENAME}" ] ||
|
||||
[ -z "${NGHTTP3_SHA256}" ]; then
|
||||
echo "Error: One or more required variables are not set."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
bash ./pkg_build.sh s ${{github.event.pull_request.number}}
|
||||
bash ./pkg_build.sh p
|
||||
- name: Upload binary txz and plg to Github artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
@@ -251,7 +229,69 @@ jobs:
|
||||
retention-days: 5
|
||||
if-no-files-found: error
|
||||
|
||||
release-pull-request:
|
||||
if: |
|
||||
github.event_name == 'pull_request' &&
|
||||
github.event.pull_request.base.ref == 'main'
|
||||
runs-on: ubuntu-latest
|
||||
needs: [build-plugin]
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Make PR Release Folder
|
||||
run: mkdir pr-release/
|
||||
|
||||
- name: Download unraid-api binary tgz
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: unraid-api
|
||||
path: pr-release
|
||||
|
||||
- name: Download plugin binary tgz
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: connect-files
|
||||
|
||||
- name: Write Changelog to Plugin XML
|
||||
run: |
|
||||
# Capture the pull request number and latest commit message
|
||||
pr_number="${{ github.event.pull_request.number }}"
|
||||
commit_message=$(git log -1 --pretty=%B)
|
||||
|
||||
# Clean up newlines, escape special characters, and handle line breaks
|
||||
notes=$(echo -e "Pull Request Build: ${pr_number}\n${commit_message}" | \
|
||||
sed ':a;N;$!ba;s/\n/\\n/g' | \
|
||||
sed -e 's/[&\\/]/\\&/g')
|
||||
|
||||
# Replace <CHANGES> tag content in the file
|
||||
sed -i -z -E "s/<CHANGES>(.*)<\/CHANGES>/<CHANGES>\n${notes}\n<\/CHANGES>/g" "plugins/dynamix.unraid.net.staging.plg"
|
||||
|
||||
- name: Copy other release files to pr-release
|
||||
run: |
|
||||
cp archive/*.txz pr-release/
|
||||
cp plugins/dynamix.unraid.net.staging.plg pr-release/
|
||||
|
||||
- name: Upload to Cloudflare
|
||||
uses: jakejarvis/s3-sync-action@v0.5.1
|
||||
env:
|
||||
AWS_S3_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
|
||||
AWS_S3_BUCKET: ${{ secrets.CF_BUCKET_PREVIEW }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
AWS_REGION: "auto"
|
||||
SOURCE_DIR: pr-release
|
||||
DEST_DIR: unraid-api/pr/${{ github.event.pull_request.number }}
|
||||
- name: Comment URL
|
||||
uses: thollander/actions-comment-pull-request@v3
|
||||
with:
|
||||
message: |
|
||||
This plugin has been deployed to Cloudflare R2 and is available for testing.
|
||||
Download it at this URL: [https://preview.dl.unraid.net/unraid-api/pr/${{ github.event.pull_request.number }}/dynamix.unraid.net.staging.plg](https://preview.dl.unraid.net/unraid-api/pr/${{ github.event.pull_request.number }}/dynamix.unraid.net.staging.plg)
|
||||
|
||||
release-staging:
|
||||
environment:
|
||||
name: staging
|
||||
# Only release if this is a push to the main branch
|
||||
if: startsWith(github.ref, 'refs/heads/main')
|
||||
runs-on: ubuntu-latest
|
||||
@@ -282,9 +322,9 @@ jobs:
|
||||
removeMarkdown: false
|
||||
filePath: "./api/CHANGELOG.md"
|
||||
|
||||
- name: Run LS in unraid-api folder
|
||||
- name: Copy Files for Staging Release
|
||||
run: |
|
||||
cp archive/dynamix.unraid.net.staging-*.txz staging-release/
|
||||
cp archive/*.txz staging-release/
|
||||
cp plugins/dynamix.unraid.net.staging.plg staging-release/
|
||||
ls -al staging-release
|
||||
|
||||
@@ -305,10 +345,9 @@ jobs:
|
||||
AWS_S3_BUCKET: ${{ secrets.CF_BUCKET_PREVIEW }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
|
||||
AWS_REGION: 'auto'
|
||||
AWS_REGION: "auto"
|
||||
SOURCE_DIR: staging-release
|
||||
DEST_DIR: unraid-api
|
||||
|
||||
|
||||
create-draft-release:
|
||||
# Only create new draft if this is a version tag
|
||||
@@ -339,6 +378,6 @@ jobs:
|
||||
files: |
|
||||
unraid-api-*.tgz
|
||||
plugins/dynamix.unraid.net*
|
||||
archive/dynamix.unraid.net*
|
||||
archive/*
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
.github/workflows/pull-request-web.yml (vendored, 82 changed lines)
@@ -1,82 +0,0 @@
name: Pull Request Web

on:
  pull_request:
    paths:
      - 'web/**'

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-web
  cancel-in-progress: true

jobs:
  lint-web:
    defaults:
      run:
        working-directory: web
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4

      - name: Create env file
        run: |
          touch .env
          echo VITE_ACCOUNT=${{ vars.VITE_ACCOUNT }} >> .env
          echo VITE_CONNECT=${{ vars.VITE_CONNECT }} >> .env
          echo VITE_UNRAID_NET=${{ vars.VITE_UNRAID_NET }} >> .env
          echo VITE_CALLBACK_KEY=${{ vars.VITE_CALLBACK_KEY }} >> .env
          cat .env

      - name: Install node
        uses: actions/setup-node@v4
        with:
          cache: "npm"
          cache-dependency-path: "web/package-lock.json"
          node-version-file: "web/.nvmrc"

      - name: Installing node deps
        run: npm install

      - name: Lint files
        run: npm run lint

  build-web:
    defaults:
      run:
        working-directory: web
    runs-on: ubuntu-latest
    environment:
      name: production
    needs: [lint-web]
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4

      - name: Create env file
        run: |
          touch .env
          echo VITE_ACCOUNT=${{ vars.VITE_ACCOUNT }} >> .env
          echo VITE_CONNECT=${{ vars.VITE_CONNECT }} >> .env
          echo VITE_UNRAID_NET=${{ vars.VITE_UNRAID_NET }} >> .env
          echo VITE_CALLBACK_KEY=${{ vars.VITE_CALLBACK_KEY }} >> .env
          cat .env

      - name: Install node
        uses: actions/setup-node@v4
        with:
          cache: "npm"
          cache-dependency-path: "web/package-lock.json"
          node-version-file: "web/.nvmrc"

      - name: Installing node deps
        run: npm install

      - name: Build
        run: npm run build

      - name: Upload build to Github artifacts
        uses: actions/upload-artifact@v4
        with:
          name: unraid-web
          path: web/.nuxt/nuxt-custom-elements/dist/unraid-components
.github/workflows/pull-request.yml (vendored, 183 changed lines)
@@ -1,183 +0,0 @@
|
||||
name: Pull Request
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- api/**
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
lint-api:
|
||||
services:
|
||||
registry: # Using a local registry is ~3x faster than exporting the image to docker agent
|
||||
image: registry:2
|
||||
ports:
|
||||
- 5000:5000
|
||||
|
||||
continue-on-error: true
|
||||
defaults:
|
||||
run:
|
||||
working-directory: api
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: true
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
# network=host driver-opt needed to push to local registry
|
||||
driver-opts: network=host
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: api
|
||||
target: builder
|
||||
push: true
|
||||
tags: localhost:5000/unraid-api:builder
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
- name: Lint
|
||||
run: |
|
||||
docker run localhost:5000/unraid-api:builder npm run lint
|
||||
|
||||
test-api:
|
||||
services:
|
||||
registry: # Using a local registry is ~3x faster than exporting the image to docker agent
|
||||
image: registry:2
|
||||
ports:
|
||||
- 5000:5000
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: api
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: true
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
# network=host driver-opt needed to push to local registry
|
||||
driver-opts: network=host
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: api
|
||||
target: builder
|
||||
push: true
|
||||
tags: localhost:5000/unraid-api:builder
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
- name: Test
|
||||
run: |
|
||||
docker run localhost:5000/unraid-api:builder npm run coverage
|
||||
|
||||
build-api:
|
||||
services:
|
||||
registry: # Using a local registry is ~3x faster than exporting the image to docker agent
|
||||
image: registry:2
|
||||
ports:
|
||||
- 5000:5000
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: api
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
outputs:
|
||||
API_VERSION: ${{ steps.build-pack-binary.outputs.API_VERSION }}
|
||||
API_MD5: ${{ steps.set-hashes.outputs.API_MD5 }}
|
||||
API_SHA256: ${{ steps.set-hashes.outputs.API_SHA256 }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: true
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
# network=host driver-opt needed to push to local registry
|
||||
driver-opts: network=host
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: api
|
||||
target: builder
|
||||
push: true
|
||||
tags: localhost:5000/unraid-api:builder
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Run Build
|
||||
run: docker run -e GIT_SHA=$(git rev-parse --short HEAD) -e IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match) -v $(pwd)/deploy:/app/deploy/ localhost:5000/unraid-api:builder npm run build-pkg
|
||||
|
||||
- name: Set Hashes
|
||||
id: set-hashes
|
||||
run: |
|
||||
API_MD5=$(md5sum ${{ github.workspace }}/api/deploy/release/*.tgz | awk '{ print $1 }')
|
||||
API_SHA256=$(sha256sum ${{ github.workspace }}/api/deploy/release/*.tgz | awk '{ print $1 }')
|
||||
echo "::set-output name=API_MD5::${API_MD5}"
|
||||
echo "::set-output name=API_SHA256::${API_SHA256}"
|
||||
|
||||
- name: Upload tgz to Github artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: unraid-api
|
||||
path: ${{ github.workspace }}/api/deploy/release/*.tgz
|
||||
|
||||
- name: Parse Changelog
|
||||
id: changelog
|
||||
uses: ocavue/changelog-parser-action@v1
|
||||
with:
|
||||
removeMarkdown: false
|
||||
filePath: "./api/CHANGELOG.md"
|
||||
|
||||
- name: View release notes
|
||||
run: |
|
||||
escapedNotes=$(sed -e 's/[&\\/]/\\&/g; s/$/\\/' -e '$s/\\$//' <<<"${{steps.changelog.outputs.latestBody}}")
|
||||
echo "${escapedNotes}"
|
||||
build-plugin:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: plugin
|
||||
runs-on: ubuntu-latest
|
||||
needs: [lint-api, test-api, build-api]
|
||||
steps:
|
||||
- name: Set Timezone
|
||||
uses: szenius/set-timezone@v1.2
|
||||
with:
|
||||
timezoneLinux: "America/Los_Angeles"
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
- name: Build Plugin
|
||||
run: |
|
||||
cd source/dynamix.unraid.net
|
||||
export API_VERSION=${{needs.build-api.outputs.API_VERSION}}
|
||||
export API_MD5=${{needs.build-api.outputs.API_MD5}}
|
||||
export API_SHA256=${{needs.build-api.outputs.API_SHA256}}
|
||||
bash ./pkg_build.sh s
|
||||
bash ./pkg_build.sh p
|
||||
- name: Create release notes
|
||||
run: |
|
||||
LAST_RELEASE=$(git tag --list --sort=v:refname | tail -1)
|
||||
echo ${LAST_RELEASE}
|
||||
RELEASE_NOTES=$(git log "$LAST_RELEASE...HEAD" --pretty=format:"- %s [\`%h\`](http://github.com/$GITHUB_REPOSITORY/commit/%H)" --reverse)
|
||||
echo "${RELEASE_NOTES}"
|
||||
# escapedNotes=$(sed -e 's/[&\\/]/\\&/g; s/$/\\/' -e '$s/\\$//' <<<"${RELEASE_NOTES}")
|
||||
# sed -i -z -E "s/<CHANGES>(.*)<\/CHANGES>/<CHANGES>\n${escapedNotes}\n<\/CHANGES>/g" "plugins/dynamix.unraid.net.staging.plg"
|
||||
- name: Upload binary txz and plg to Github artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: connect-files
|
||||
path: |
|
||||
${{ github.workspace }}/plugin/archive/*.txz
|
||||
${{ github.workspace }}/plugin/plugins/*.plg
|
||||
retention-days: 5
|
||||
if-no-files-found: error
|
||||
.gitignore (vendored, 3 changed lines)
@@ -55,6 +55,9 @@ typings/
# OSX
.DS_Store

# Jetbrains Settings Files
.idea

# Temp dir for tests
test/__temp__/*

.release-please-manifest.json (normal file, 1 changed line)
@@ -0,0 +1 @@
{"api":"3.10.0","web":"3.10.0"}
@@ -1,11 +1,12 @@
PATHS_UNRAID_DATA=./dev/data # Where we store plugin data (e.g. permissions.json)
PATHS_STATES=./dev/states # Where .ini files live (e.g. vars.ini)
PATHS_DYNAMIX_BASE=./dev/dynamix # Dynamix's data directory
PATHS_DYNAMIX_CONFIG_DEFAULT=./dev/dynamix/default.cfg # Dynamix's default config file, which ships with unraid
PATHS_DYNAMIX_CONFIG=./dev/dynamix/dynamix.cfg # Dynamix's config file
PATHS_MY_SERVERS_CONFIG=./dev/Unraid.net/myservers.cfg # My servers config file
PATHS_MY_SERVERS_FB=./dev/Unraid.net/fb_keepalive # My servers flashbackup timekeeper file
PATHS_KEYFILE_BASE=./dev/Unraid.net # Keyfile location
PATHS_MACHINE_ID=./dev/data/machine-id

ENVIRONMENT="development"
NODE_ENV="development"
PORT="3001"
@@ -13,6 +14,8 @@ PLAYGROUND=true
INTROSPECTION=true
MOTHERSHIP_GRAPHQL_LINK="http://authenticator:3000/graphql"
NODE_TLS_REJECT_UNAUTHORIZED=0
BYPASS_PERMISSION_CHECKS=true
BYPASS_CORS_CHECKS=false
BYPASS_PERMISSION_CHECKS=false
BYPASS_CORS_CHECKS=true
CHOKIDAR_USEPOLLING=true
LOG_TRANSPORT=console
LOG_LEVEL=trace
@@ -1,47 +0,0 @@
/** @type {import('eslint').Linter.Config} */
module.exports = {
    root: true,
    plugins: [
        '@typescript-eslint/eslint-plugin',
        'unused-imports',
        'eslint-plugin-unicorn',
    ],
    ignorePatterns: ['src/graphql/generated/**/*.ts', '*.test.ts', 'tsup.config.ts', 'vite.config.ts'],
    parser: '@typescript-eslint/parser',
    rules: {
        '@typescript-eslint/no-redundant-type-constituents': 'off',
        '@typescript-eslint/no-unsafe-call': 'off',
        '@typescript-eslint/naming-convention': 'off',
        '@typescript-eslint/no-unsafe-assignment': 'off',
        '@typescript-eslint/no-unsafe-return': 'off',
        '@typescript-eslint/ban-types': 'off',
        '@typescript-eslint/no-explicit-any': 'off',
        '@typescript-eslint/consistent-type-imports': [
            'warn',
            { fixStyle: 'inline-type-imports' },
        ],
        'unicorn/numeric-separators-style': [
            'error',
            { number: { minimumDigits: 0, groupLength: 3 } },
        ],
        'import/no-cycle': 'off', // Change this to "error" to find circular imports
        '@typescript-eslint/no-use-before-define': ['error'],
        'no-multiple-empty-lines': ['error', { max: 1, maxBOF: 0, maxEOF: 1 }],
    },
    overrides: [
        {
            files: ['*.ts'],
            extends: [
                'eslint:recommended',
                'plugin:@typescript-eslint/recommended',
            ],
            parserOptions: {
                project: true,
                tsconfigRootDir: __dirname,
            },
            rules: {
                '@typescript-eslint/no-explicit-any': 'off',
            },
        },
    ],
};
api/.eslintrc.ts (normal file, 21 changed lines)
@@ -0,0 +1,21 @@

import type { Linter } from 'eslint';
import eslint from '@eslint/js';
import tseslint from 'typescript-eslint';

export default tseslint.config(eslint.configs.recommended, ...tseslint.configs.recommended, {
    rules: {
        '@typescript-eslint/no-redundant-type-constituents': 'off',
        '@typescript-eslint/no-unsafe-call': 'off',
        '@typescript-eslint/naming-convention': 'off',
        '@typescript-eslint/no-unsafe-assignment': 'off',
        '@typescript-eslint/no-unsafe-return': 'off',
        '@typescript-eslint/ban-types': 'off',
        '@typescript-eslint/no-explicit-any': 'off',
        '@typescript-eslint/no-empty-object-type': 'off',
        'no-use-before-define': ['off'],
        'no-multiple-empty-lines': ['error', { max: 1, maxBOF: 0, maxEOF: 1 }],
        '@typescript-eslint/no-unused-vars': 'off',
        '@typescript-eslint/no-unused-expressions': 'off',
    },
});
api/.gitignore (vendored, normal file, 82 changed lines)
@@ -0,0 +1,82 @@
# Logs
./logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
coverage-ts

# nyc test coverage
.nyc_output

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# TypeScript v1 declaration files
typings/

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variables file
.env

# next.js build output
.next

# Visual Studio Code workspace
.vscode/*
!.vscode/extensions.json

# OSX
.DS_Store

# Temp dir for tests
test/__temp__/*

# Built files
dist

# Typescript
typescript

# Ultra runner
.ultra.cache.json

# Github actions
RELEASE_NOTES.md

# Docker Deploy Folder
deploy/*
!deploy/.gitkeep

# pkg cache
.pkg-cache

# IDE Settings Files
.idea
@@ -1 +1 @@
18.19.1
v20
api/.prettierrc.cjs (normal file, 38 changed lines)
@@ -0,0 +1,38 @@
/**
 * @see https://prettier.io/docs/en/configuration.html
 * @type {import("prettier").Config}
 */
module.exports = {
    trailingComma: 'es5',
    tabWidth: 4,
    semi: true,
    singleQuote: true,
    printWidth: 105,
    plugins: ['@ianvs/prettier-plugin-sort-imports'],
    // decorators-legacy lets the import sorter transform files with decorators
    importOrderParserPlugins: ['typescript', 'decorators-legacy'],
    importOrder: [
        /**----------------------
         * Nest.js & node.js imports
         *------------------------**/
        '<TYPES>^@nestjs(/.*)?$',
        '^@nestjs(/.*)?$', // matches imports starting with @nestjs
        '<TYPES>^(node:)',
        '<BUILTIN_MODULES>', // Node.js built-in modules
        '',
        /**----------------------
         * Third party packages
         *------------------------**/
        '<TYPES>',
        '<THIRD_PARTY_MODULES>', // Imports not matched by other special words or groups.
        '',
        /**----------------------
         * Application Code
         *------------------------**/
        '<TYPES>^@app(/.*)?$', // matches type imports starting with @app
        '^@app(/.*)?$',
        '',
        '<TYPES>^[.]',
        '^[.]', // relative imports
    ],
};
api/CHANGELOG.md (179 changed lines)
@@ -2,6 +2,183 @@
|
||||
|
||||
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
|
||||
|
||||
## [3.11.0](https://github.com/unraid/api/compare/v3.10.1...v3.11.0) (2024-09-11)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* reduce how often rc.flashbackup checks for changes ([793d368](https://github.com/unraid/api/commit/793d3681404018e0ae933df0ad111809220ad138))
|
||||
* send api_version to flash/activate endpoint ([d8ec20e](https://github.com/unraid/api/commit/d8ec20ea6aa35aa241abd8424c4d884bcbb8f590))
|
||||
* update ProvisionCert.php to clean hosts file when it runs ([fbe20c9](https://github.com/unraid/api/commit/fbe20c97b327849c15a4b34f5f53476edaefbeb6))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* remove local flash backup ratelimit file on uninstall/update ([abf207b](https://github.com/unraid/api/commit/abf207b077861798c53739b1965207f87d5633b3))
|
||||
|
||||
### [3.10.1](https://github.com/unraid/api/compare/v3.10.0...v3.10.1) (2024-09-03)
|
||||
|
||||
## [3.10.0](https://github.com/unraid/api/compare/v3.9.0...v3.10.0) (2024-09-03)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add a timestamp to flash backup ([#877](https://github.com/unraid/api/issues/877)) ([b868fd4](https://github.com/unraid/api/commit/b868fd46c3886b2182245a61f20be6df65e46abe))
|
||||
* add environment to docker-compose ([2ee4683](https://github.com/unraid/api/commit/2ee46839095e3b8ee287cfe10f29ae9a39dcff68))
|
||||
* add global agent ([#897](https://github.com/unraid/api/issues/897)) ([8b0dc69](https://github.com/unraid/api/commit/8b0dc69f65bd3e280a21c50aab221334f7341b1c))
|
||||
* add logrotate to cron in nestjs ([#839](https://github.com/unraid/api/issues/839)) ([5c91524](https://github.com/unraid/api/commit/5c91524d849147c0ac7925f3a2f1cce67ffe75de))
|
||||
* add new staging url for connect website ([#841](https://github.com/unraid/api/issues/841)) ([4cfc07b](https://github.com/unraid/api/commit/4cfc07b6763dbb79b68cf01f7eaf7cf33370d4db))
|
||||
* add support for expiration in var.ini ([#833](https://github.com/unraid/api/issues/833)) ([0474c2e](https://github.com/unraid/api/commit/0474c2e14fa462d2e1ec6d9a7f974660385d073e))
|
||||
* always show DRA even if disabled ([ab708c0](https://github.com/unraid/api/commit/ab708c0df634e21bf81595412d7de0be3ff7c392))
|
||||
* close log on exit ([d6ede86](https://github.com/unraid/api/commit/d6ede86eca6301342cdf35bf1f9365896b5e5009))
|
||||
* create stable hash based on apikey rather than hostname ([ecf5554](https://github.com/unraid/api/commit/ecf5554e304cc7dee78cb1f206ef4e80222c3e64))
|
||||
* disable all legacy dashboard and network logic ([6784f4b](https://github.com/unraid/api/commit/6784f4b6e1a12b2f30bfa9ab4fe6310994bd18ae))
|
||||
* dynamic remote access using remote queries ([f7fc0c4](https://github.com/unraid/api/commit/f7fc0c431561978054d2ff37d1aa644865e846ec))
|
||||
* extraOrigins public, remove origin listener ([91f96ba](https://github.com/unraid/api/commit/91f96ba818773d6e71dde1ff52a4c8ec21ba6b5d))
|
||||
* fix codegen ([d0bf5bb](https://github.com/unraid/api/commit/d0bf5bb8197b11f7a250ca5392890184a1dbeff7))
|
||||
* fix exit hook and cleanup docker scripts ([#758](https://github.com/unraid/api/issues/758)) ([a9ff73e](https://github.com/unraid/api/commit/a9ff73e0a04c67e9ec9d5551cf0b1f124be6f381))
|
||||
* fix logging format on start and stop ([c6720c3](https://github.com/unraid/api/commit/c6720c331df055480d2d65b37290f4978fe429da))
|
||||
* local start command ([99b6007](https://github.com/unraid/api/commit/99b6007ba30353084a8bea54cc0e782fcc1bfea4))
|
||||
* log config recreation reason ([f36c72f](https://github.com/unraid/api/commit/f36c72f5ad44b7e41d1726fa181dc2b9f594c72c))
|
||||
* move dynamic remote access to be fully api controlled ([206eb6b](https://github.com/unraid/api/commit/206eb6b74aa83047237e5f6c94c46b08c6507168))
|
||||
* move FQDN urls to a generic parser ([#899](https://github.com/unraid/api/issues/899)) ([246595e](https://github.com/unraid/api/commit/246595ee7acd8370906a759cbe618def4f52c173))
|
||||
* nestjs initial query implementation ([#748](https://github.com/unraid/api/issues/748)) ([075d7f2](https://github.com/unraid/api/commit/075d7f25785bf686779b7fee1d5ea39f09ff3ea8))
|
||||
* new key types in API ([e42f9dc](https://github.com/unraid/api/commit/e42f9dc95be03e8389aac443f2147c07a316d48d))
|
||||
* regTy swapped ([564b25c](https://github.com/unraid/api/commit/564b25cf5ce0a62d40f8d63d44c81e9c8560e0be))
|
||||
* remove dashboard resolver completely in favor of direct field resolvers ([1cd1ee5](https://github.com/unraid/api/commit/1cd1ee534825ccf775208c438ae0bd777bbe4d39))
|
||||
* remove dashboard types ([2f0167d](https://github.com/unraid/api/commit/2f0167dc89835bcf8aa946425c5c6683221fd763))
|
||||
* run codegen and update build script ([07512ad](https://github.com/unraid/api/commit/07512adc13ee0d819db45ff6c5c5f58a0ba31141))
|
||||
* settings through the API ([#867](https://github.com/unraid/api/issues/867)) ([e73624b](https://github.com/unraid/api/commit/e73624be6be8bc2c70d898b8601a88cc8d20a3e4))
|
||||
* swap to docker compose from docker-compose ([ec16a6a](https://github.com/unraid/api/commit/ec16a6aab1a2d5c836387da438fbeade07d23425))
|
||||
* swap to fragement usage on webcomponent ([42733ab](https://github.com/unraid/api/commit/42733abf6e443516ff715569333422ce80d3b1d2))
|
||||
* update tests and snapshots ([c39aa17](https://github.com/unraid/api/commit/c39aa17e4302ed56b3097ab3244d840f11eb686b))
|
||||
* upgrade a ton of dependencies ([#842](https://github.com/unraid/api/issues/842)) ([94c1746](https://github.com/unraid/api/commit/94c174620c2347a3cf3d100404635f99a5b47287))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add serverName / description to dashboard payload ([9677aff](https://github.com/unraid/api/commit/9677aff1cd0942f36a2845f3f105601c494efd9e))
|
||||
* allow failure for log deletion ([eff3142](https://github.com/unraid/api/commit/eff31423927644be436a831126678719c2eb0621))
|
||||
* allowed origins check not working without spaces ([#838](https://github.com/unraid/api/issues/838)) ([b998b38](https://github.com/unraid/api/commit/b998b38355fab77ecc2f62bc64896766218db3d4))
|
||||
* **api:** readme discord url ([ffd5c6a](https://github.com/unraid/api/commit/ffd5c6afb64956e76df22c77104a21bc22798008))
|
||||
* build docker command updated to use dc.sh script ([0b40886](https://github.com/unraid/api/commit/0b40886e84f27a94dbf67ef4ca0cd8539ef3913e))
|
||||
* codegen on web run ([e2e67c2](https://github.com/unraid/api/commit/e2e67c21067a138d963f5f10760b84cf6a533542))
|
||||
* **deps:** update dependency @apollo/client to v3.9.5 ([#785](https://github.com/unraid/api/issues/785)) ([75b98bc](https://github.com/unraid/api/commit/75b98bc1cbca5b66ae72f52a0b6f5f58230a2473))
|
||||
* **deps:** update dependency graphql to v16.8.1 ([bff1b19](https://github.com/unraid/api/commit/bff1b19706bee1e3103e3a0a1d2fceb3154f9bba))
|
||||
* **deps:** update dependency graphql-ws to v5.15.0 ([#790](https://github.com/unraid/api/issues/790)) ([4773b13](https://github.com/unraid/api/commit/4773b132167d740d4c996efe22e0f1b99576fb9b))
|
||||
* **deps:** update dependency ws to v8.16.0 ([#815](https://github.com/unraid/api/issues/815)) ([212020e](https://github.com/unraid/api/commit/212020e78d4de0576137058a3374837b4a43e02d))
|
||||
* **deps:** update dependency wtfnode to v0.9.3 ([#901](https://github.com/unraid/api/issues/901)) ([a88482b](https://github.com/unraid/api/commit/a88482bfcbf134f55330f8728bc5c7f67c521773))
|
||||
* **deps:** update graphql-tools monorepo ([3447eb0](https://github.com/unraid/api/commit/3447eb047a1dcd575b88a96bbcef9946aca366a1))
|
||||
* **deps:** update graphql-tools monorepo (major) ([#693](https://github.com/unraid/api/issues/693)) ([3447eb0](https://github.com/unraid/api/commit/3447eb047a1dcd575b88a96bbcef9946aca366a1))
|
||||
* **deps:** update nest monorepo ([#816](https://github.com/unraid/api/issues/816)) ([4af3699](https://github.com/unraid/api/commit/4af36991b8b376f816ed51fd503a66e99675a3e7))
|
||||
* excessive logging ([89cb254](https://github.com/unraid/api/commit/89cb2544ed0e0edd33b59f15d487487e22c0ae32))
|
||||
* exit with process.exit not process.exitcode ([dcb6def](https://github.com/unraid/api/commit/dcb6def1cf3365dca819feed101160c8ad0125dc))
|
||||
* lint ([919873d](https://github.com/unraid/api/commit/919873d9edee304d99036a4a810db3789c734fbf))
|
||||
* local container startup commands cleaned up ([6c0ccb2](https://github.com/unraid/api/commit/6c0ccb2b24f98282be4db2e0b2e6362f4a187def))
|
||||
* logrotate not working due to invalid ownership of unraid-api folder ([ec0581a](https://github.com/unraid/api/commit/ec0581abf58a217f698d52d5337f2b312e5a645b))
|
||||
* optional check on api.version to allow fallback to save value ([0ac4455](https://github.com/unraid/api/commit/0ac4455f78407eca7aa1d6ee360830067a1c5c3e))
|
||||
* permission for dashboard payload ([704a530](https://github.com/unraid/api/commit/704a530653dac415766bded5e96f6060f931e591))
|
||||
* rearrange exit hook to try to fix closing ([843d3f4](https://github.com/unraid/api/commit/843d3f41162c5dbcfd7803912b1879d7a182231a))
|
||||
* revert myservers.cfg to fix test ([a7705be](https://github.com/unraid/api/commit/a7705beb0a5b32660367ad8de9b46b06f7a3bec7))
|
||||
* run hourly ([0425794](https://github.com/unraid/api/commit/0425794356a01262222e7dff2645d3629e00d0f6))
|
||||
* unused import ([065fe57](https://github.com/unraid/api/commit/065fe575f578a74d593805c3121dd7fbdfc3e5ae))
|
||||
* update snapshots ([c8a0a8e](https://github.com/unraid/api/commit/c8a0a8ec007abc0372464c7e2b44bd47b6babd94))
|
||||
|
||||
## [3.9.0](https://github.com/unraid/api/compare/api-v3.8.1...api-v3.9.0) (2024-09-03)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add a timestamp to flash backup ([#877](https://github.com/unraid/api/issues/877)) ([b868fd4](https://github.com/unraid/api/commit/b868fd46c3886b2182245a61f20be6df65e46abe))
|
||||
* add environment to docker-compose ([2ee4683](https://github.com/unraid/api/commit/2ee46839095e3b8ee287cfe10f29ae9a39dcff68))
|
||||
* add global agent ([#897](https://github.com/unraid/api/issues/897)) ([8b0dc69](https://github.com/unraid/api/commit/8b0dc69f65bd3e280a21c50aab221334f7341b1c))
|
||||
* add logrotate to cron in nestjs ([#839](https://github.com/unraid/api/issues/839)) ([5c91524](https://github.com/unraid/api/commit/5c91524d849147c0ac7925f3a2f1cce67ffe75de))
|
||||
* add new staging url for connect website ([#841](https://github.com/unraid/api/issues/841)) ([4cfc07b](https://github.com/unraid/api/commit/4cfc07b6763dbb79b68cf01f7eaf7cf33370d4db))
|
||||
* add support for expiration in var.ini ([#833](https://github.com/unraid/api/issues/833)) ([0474c2e](https://github.com/unraid/api/commit/0474c2e14fa462d2e1ec6d9a7f974660385d073e))
|
||||
* always show DRA even if disabled ([ab708c0](https://github.com/unraid/api/commit/ab708c0df634e21bf81595412d7de0be3ff7c392))
|
||||
* close log on exit ([d6ede86](https://github.com/unraid/api/commit/d6ede86eca6301342cdf35bf1f9365896b5e5009))
|
||||
* create stable hash based on apikey rather than hostname ([ecf5554](https://github.com/unraid/api/commit/ecf5554e304cc7dee78cb1f206ef4e80222c3e64))
|
||||
* disable all legacy dashboard and network logic ([6784f4b](https://github.com/unraid/api/commit/6784f4b6e1a12b2f30bfa9ab4fe6310994bd18ae))
|
||||
* dynamic remote access using remote queries ([f7fc0c4](https://github.com/unraid/api/commit/f7fc0c431561978054d2ff37d1aa644865e846ec))
|
||||
* extraOrigins public, remove origin listener ([91f96ba](https://github.com/unraid/api/commit/91f96ba818773d6e71dde1ff52a4c8ec21ba6b5d))
|
||||
* fix codegen ([d0bf5bb](https://github.com/unraid/api/commit/d0bf5bb8197b11f7a250ca5392890184a1dbeff7))
|
||||
* fix exit hook and cleanup docker scripts ([#758](https://github.com/unraid/api/issues/758)) ([a9ff73e](https://github.com/unraid/api/commit/a9ff73e0a04c67e9ec9d5551cf0b1f124be6f381))
|
||||
* fix logging format on start and stop ([c6720c3](https://github.com/unraid/api/commit/c6720c331df055480d2d65b37290f4978fe429da))
|
||||
* local start command ([99b6007](https://github.com/unraid/api/commit/99b6007ba30353084a8bea54cc0e782fcc1bfea4))
|
||||
* log config recreation reason ([f36c72f](https://github.com/unraid/api/commit/f36c72f5ad44b7e41d1726fa181dc2b9f594c72c))
|
||||
* move dynamic remote access to be fully api controlled ([206eb6b](https://github.com/unraid/api/commit/206eb6b74aa83047237e5f6c94c46b08c6507168))
|
||||
* move FQDN urls to a generic parser ([#899](https://github.com/unraid/api/issues/899)) ([246595e](https://github.com/unraid/api/commit/246595ee7acd8370906a759cbe618def4f52c173))
* nestjs initial query implementation ([#748](https://github.com/unraid/api/issues/748)) ([075d7f2](https://github.com/unraid/api/commit/075d7f25785bf686779b7fee1d5ea39f09ff3ea8))
* new key types in API ([e42f9dc](https://github.com/unraid/api/commit/e42f9dc95be03e8389aac443f2147c07a316d48d))
* regTy swapped ([564b25c](https://github.com/unraid/api/commit/564b25cf5ce0a62d40f8d63d44c81e9c8560e0be))
* remove dashboard resolver completely in favor of direct field resolvers ([1cd1ee5](https://github.com/unraid/api/commit/1cd1ee534825ccf775208c438ae0bd777bbe4d39))
* remove dashboard types ([2f0167d](https://github.com/unraid/api/commit/2f0167dc89835bcf8aa946425c5c6683221fd763))
* run codegen and update build script ([07512ad](https://github.com/unraid/api/commit/07512adc13ee0d819db45ff6c5c5f58a0ba31141))
* settings through the API ([#867](https://github.com/unraid/api/issues/867)) ([e73624b](https://github.com/unraid/api/commit/e73624be6be8bc2c70d898b8601a88cc8d20a3e4))
* swap to docker compose from docker-compose ([ec16a6a](https://github.com/unraid/api/commit/ec16a6aab1a2d5c836387da438fbeade07d23425))
* swap to fragement usage on webcomponent ([42733ab](https://github.com/unraid/api/commit/42733abf6e443516ff715569333422ce80d3b1d2))
* update tests and snapshots ([c39aa17](https://github.com/unraid/api/commit/c39aa17e4302ed56b3097ab3244d840f11eb686b))
* upgrade a ton of dependencies ([#842](https://github.com/unraid/api/issues/842)) ([94c1746](https://github.com/unraid/api/commit/94c174620c2347a3cf3d100404635f99a5b47287))


### Bug Fixes

* add serverName / description to dashboard payload ([9677aff](https://github.com/unraid/api/commit/9677aff1cd0942f36a2845f3f105601c494efd9e))
* allow failure for log deletion ([eff3142](https://github.com/unraid/api/commit/eff31423927644be436a831126678719c2eb0621))
* allowed origins check not working without spaces ([#838](https://github.com/unraid/api/issues/838)) ([b998b38](https://github.com/unraid/api/commit/b998b38355fab77ecc2f62bc64896766218db3d4))
* **api:** readme discord url ([ffd5c6a](https://github.com/unraid/api/commit/ffd5c6afb64956e76df22c77104a21bc22798008))
* build docker command updated to use dc.sh script ([0b40886](https://github.com/unraid/api/commit/0b40886e84f27a94dbf67ef4ca0cd8539ef3913e))
* codegen on web run ([e2e67c2](https://github.com/unraid/api/commit/e2e67c21067a138d963f5f10760b84cf6a533542))
* **deps:** update dependency @apollo/client to v3.9.5 ([#785](https://github.com/unraid/api/issues/785)) ([75b98bc](https://github.com/unraid/api/commit/75b98bc1cbca5b66ae72f52a0b6f5f58230a2473))
* **deps:** update dependency graphql to v16.8.1 ([bff1b19](https://github.com/unraid/api/commit/bff1b19706bee1e3103e3a0a1d2fceb3154f9bba))
* **deps:** update dependency graphql-ws to v5.15.0 ([#790](https://github.com/unraid/api/issues/790)) ([4773b13](https://github.com/unraid/api/commit/4773b132167d740d4c996efe22e0f1b99576fb9b))
* **deps:** update dependency ws to v8.16.0 ([#815](https://github.com/unraid/api/issues/815)) ([212020e](https://github.com/unraid/api/commit/212020e78d4de0576137058a3374837b4a43e02d))
* **deps:** update dependency wtfnode to v0.9.3 ([#901](https://github.com/unraid/api/issues/901)) ([a88482b](https://github.com/unraid/api/commit/a88482bfcbf134f55330f8728bc5c7f67c521773))
* **deps:** update graphql-tools monorepo ([3447eb0](https://github.com/unraid/api/commit/3447eb047a1dcd575b88a96bbcef9946aca366a1))
* **deps:** update graphql-tools monorepo (major) ([#693](https://github.com/unraid/api/issues/693)) ([3447eb0](https://github.com/unraid/api/commit/3447eb047a1dcd575b88a96bbcef9946aca366a1))
* **deps:** update nest monorepo ([#816](https://github.com/unraid/api/issues/816)) ([4af3699](https://github.com/unraid/api/commit/4af36991b8b376f816ed51fd503a66e99675a3e7))
* excessive logging ([89cb254](https://github.com/unraid/api/commit/89cb2544ed0e0edd33b59f15d487487e22c0ae32))
* exit with process.exit not process.exitcode ([dcb6def](https://github.com/unraid/api/commit/dcb6def1cf3365dca819feed101160c8ad0125dc))
* lint ([919873d](https://github.com/unraid/api/commit/919873d9edee304d99036a4a810db3789c734fbf))
* local container startup commands cleaned up ([6c0ccb2](https://github.com/unraid/api/commit/6c0ccb2b24f98282be4db2e0b2e6362f4a187def))
* logrotate not working due to invalid ownership of unraid-api folder ([ec0581a](https://github.com/unraid/api/commit/ec0581abf58a217f698d52d5337f2b312e5a645b))
* optional check on api.version to allow fallback to save value ([0ac4455](https://github.com/unraid/api/commit/0ac4455f78407eca7aa1d6ee360830067a1c5c3e))
* permission for dashboard payload ([704a530](https://github.com/unraid/api/commit/704a530653dac415766bded5e96f6060f931e591))
* rearrange exit hook to try to fix closing ([843d3f4](https://github.com/unraid/api/commit/843d3f41162c5dbcfd7803912b1879d7a182231a))
* revert myservers.cfg to fix test ([a7705be](https://github.com/unraid/api/commit/a7705beb0a5b32660367ad8de9b46b06f7a3bec7))
* run hourly ([0425794](https://github.com/unraid/api/commit/0425794356a01262222e7dff2645d3629e00d0f6))
* unused import ([065fe57](https://github.com/unraid/api/commit/065fe575f578a74d593805c3121dd7fbdfc3e5ae))
* update snapshots ([c8a0a8e](https://github.com/unraid/api/commit/c8a0a8ec007abc0372464c7e2b44bd47b6babd94))

### [3.8.1](https://github.com/unraid/api/compare/v3.8.0...v3.8.1) (2024-08-13)

## [3.8.0](https://github.com/unraid/api/compare/v3.7.1...v3.8.0) (2024-08-13)


### Features

* always force push ([662f3ce](https://github.com/unraid/api/commit/662f3ce440593e609c64364726f7da16dda0972b))
* don't allow flash backup repos larger than 500MB ([#890](https://github.com/unraid/api/issues/890)) ([30a32f5](https://github.com/unraid/api/commit/30a32f5fe684bb32c084c4125aade5e63ffd788b))
* downgradeOs callback for non stable osCurrentBranch ([17c4489](https://github.com/unraid/api/commit/17c4489e97bda504ca45e360591655ded166c355))
* settings through the API ([#867](https://github.com/unraid/api/issues/867)) ([e73624b](https://github.com/unraid/api/commit/e73624be6be8bc2c70d898b8601a88cc8d20a3e4))
* swap to docker compose from docker-compose ([ec16a6a](https://github.com/unraid/api/commit/ec16a6aab1a2d5c836387da438fbeade07d23425))

### Bug Fixes

* apolloClient types ([f14c767](https://github.com/unraid/api/commit/f14c7673735b92aa167e9e8dcb14a045bcfea994))
* **deps:** update dependency @vue/apollo-composable to v4.0.2 ([#787](https://github.com/unraid/api/issues/787)) ([edfc846](https://github.com/unraid/api/commit/edfc8464b0e0c2f38003ae8420e81532fd18351f))
* formattedRegTm type ([748906e](https://github.com/unraid/api/commit/748906e15d30c6162e2f08f28724c9104c81d123))
* i18n t prop type ([96d519f](https://github.com/unraid/api/commit/96d519f3e6b96ea7c4dc60616522216de20ee140))
* lint error for web components ([bc27b20](https://github.com/unraid/api/commit/bc27b20524934cf896efb84a131cd270431c508c))
* lint issues ([853dc19](https://github.com/unraid/api/commit/853dc195b13fff29160afb44f9ff11d4dd6a3232))
* swap undefined to null ([ebba976](https://github.com/unraid/api/commit/ebba9769873a6536e3fce65978e6475d93280560))
* tailwind config types ([0f77e55](https://github.com/unraid/api/commit/0f77e5596db3356b5dc05129b3ce215a8809e1dc))
* ts-expect-error unneeded ([ee4d4e9](https://github.com/unraid/api/commit/ee4d4e9f12b4488ff39445bc72c1b83a9d93e993))
* type check ([606aad7](https://github.com/unraid/api/commit/606aad703d91b72a14e15da3100dfa355052ed58))
* type errors round 1 ([977d5da](https://github.com/unraid/api/commit/977d5daf04012f16e7b6602167338f0bc363735a))
* update status button alignment ([4f2deaf](https://github.com/unraid/api/commit/4f2deaf70e5caa9f29fc5b2974b278f80b7b3a8a))

### [3.7.1](https://github.com/unraid/api/compare/v3.7.0...v3.7.1) (2024-05-15)

@@ -3444,4 +3621,4 @@ All notable changes to this project will be documented in this file. See [standa

# Changelog

All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

@@ -1,7 +1,7 @@
###########################################################
# Development/Build Image
###########################################################
FROM node:18.19.1-bookworm-slim As development
FROM node:20-bookworm-slim AS development

# Install build tools and dependencies
RUN apt-get update -y && apt-get install -y \
@@ -20,17 +20,10 @@ WORKDIR /app
# Set app env
ENV NODE_ENV=development

# Setup cache for pkg
ENV PKG_CACHE_PATH /app/.pkg-cache
RUN mkdir -p ${PKG_CACHE_PATH}

COPY tsconfig.json tsup.config.ts .eslintrc.cjs .npmrc .env.production .env.staging ./
COPY tsconfig.json .eslintrc.ts .npmrc .env.production .env.staging ./

COPY package.json package-lock.json ./

# Install pkg
RUN npm i -g pkg zx

# Install deps
RUN npm i

@@ -42,6 +35,8 @@ EXPOSE 4000

FROM development AS builder

ENV NODE_ENV=production

COPY . .

CMD ["npm", "run", "build-pkg"]
CMD ["npm", "run", "build-and-pack"]
@@ -2,7 +2,11 @@

## Installation

Install the production plugin via the apps tab (search for "my servers") on Unraid 6.9.2 or later.
Install the production plugin via the apps tab (search for "Unraid Connect")

Manual install can be done with the following routes:
[production](https://stable.dl.unraid.net/unraid-api/dynamix.unraid.net.plg)
[staging](https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.staging.plg)

## CLI

@@ -31,7 +35,7 @@ Options:
--environment production/staging/development Set the working environment.
--log-level ALL/TRACE/DEBUG/INFO/WARN/ERROR/FATAL/MARK/OFF Set the log level.

Copyright © 2022 Lime Technology, Inc.
Copyright © 2024 Lime Technology, Inc.

```
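For illustration, a minimal sketch of combining the two options documented above; pairing them with the `start` subcommand is an assumption, not something the excerpt confirms:

```bash
# Hypothetical invocation: run the API against staging with verbose logging.
# Both flags are listed in the options above; their use with `start` is assumed.
unraid-api start --environment staging --log-level DEBUG
```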
@@ -55,4 +59,4 @@ unraid-api report -vv
If you found this file you're likely a developer. If you'd like to know more about the API and when it's available please join [our discord](https://discord.unraid.net/).

## License
Copyright 2019-2022 Lime Technology Inc. All rights reserved.
Copyright Lime Technology Inc. All rights reserved.

@@ -1,5 +1,5 @@
[api]
version="3.4.0"
version="3.11.0"
extraOrigins="https://google.com,https://test.com"
[local]
[notifier]
80
api/dev/dynamix/default.cfg
Normal file
@@ -0,0 +1,80 @@
[confirm]
|
||||
down="1"
|
||||
stop="1"
|
||||
[display]
|
||||
width=""
|
||||
font=""
|
||||
tty="15"
|
||||
date="%c"
|
||||
time="%R"
|
||||
number=".,"
|
||||
unit="C"
|
||||
scale="-1"
|
||||
resize="0"
|
||||
wwn="0"
|
||||
total="1"
|
||||
banner=""
|
||||
header=""
|
||||
background=""
|
||||
tabs="1"
|
||||
users="Tasks:3"
|
||||
usage="0"
|
||||
text="1"
|
||||
warning="70"
|
||||
critical="90"
|
||||
hot="45"
|
||||
max="55"
|
||||
hotssd="60"
|
||||
maxssd="70"
|
||||
power=""
|
||||
theme="white"
|
||||
locale=""
|
||||
raw=""
|
||||
rtl=""
|
||||
headermetacolor=""
|
||||
headerdescription="yes"
|
||||
showBannerGradient="yes"
|
||||
[parity]
|
||||
mode="0"
|
||||
hour="0 0"
|
||||
dotm="1"
|
||||
month="1"
|
||||
day="0"
|
||||
cron=""
|
||||
write="NOCORRECT"
|
||||
[notify]
|
||||
display="0"
|
||||
life="5"
|
||||
date="d-m-Y"
|
||||
time="H:i"
|
||||
position="top-right"
|
||||
path="/tmp/notifications"
|
||||
system="*/1 * * * *"
|
||||
entity="1"
|
||||
normal="1"
|
||||
warning="1"
|
||||
alert="1"
|
||||
unraid="1"
|
||||
plugin="1"
|
||||
docker_notify="1"
|
||||
language_notify="1"
|
||||
report="1"
|
||||
unraidos=""
|
||||
version=""
|
||||
docker_update=""
|
||||
language_update=""
|
||||
status=""
|
||||
[ssmtp]
|
||||
root=""
|
||||
RcptTo=""
|
||||
SetEmailPriority="True"
|
||||
Subject="Unraid Status: "
|
||||
server="smtp.gmail.com"
|
||||
port="465"
|
||||
UseTLS="YES"
|
||||
UseSTARTTLS="NO"
|
||||
UseTLSCert="NO"
|
||||
TLSCert=""
|
||||
AuthMethod="login"
|
||||
AuthUser=""
|
||||
AuthPass=""
|
||||
@@ -3,3 +3,4 @@ event=Unraid Parity check
subject=Notice [UNRAID] - Parity check finished (0 errors)
description=Canceled
importance=warning
link=/
@@ -1,5 +1,5 @@
|
||||
[api]
|
||||
version="3.4.0"
|
||||
version="3.11.0"
|
||||
extraOrigins="https://google.com,https://test.com"
|
||||
[local]
|
||||
[notifier]
|
||||
@@ -16,9 +16,9 @@ regWizTime="1611175408732_0951-1653-3509-FBA155FA23C0"
|
||||
idtoken=""
|
||||
accesstoken=""
|
||||
refreshtoken=""
|
||||
allowedOrigins="/var/run/unraid-notifications.sock, /var/run/unraid-php.sock, /var/run/unraid-cli.sock, http://localhost:8080, https://localhost:4443, https://tower.local:4443, https://192.168.1.150:4443, https://tower:4443, https://192-168-1-150.thisisfourtyrandomcharacters012345678900.myunraid.net:4443, https://85-121-123-122.thisisfourtyrandomcharacters012345678900.myunraid.net:8443, https://10-252-0-1.hash.myunraid.net:4443, https://10-252-1-1.hash.myunraid.net:4443, https://10-253-3-1.hash.myunraid.net:4443, https://10-253-4-1.hash.myunraid.net:4443, https://10-253-5-1.hash.myunraid.net:4443, https://google.com, https://test.com, https://connect.myunraid.net, https://connect-staging.myunraid.net, https://dev-my.myunraid.net:4000, https://studio.apollographql.com"
|
||||
allowedOrigins="/var/run/unraid-notifications.sock, /var/run/unraid-php.sock, /var/run/unraid-cli.sock, http://localhost:8080, https://localhost:4443, https://tower.local:4443, https://192.168.1.150:4443, https://tower:4443, https://192-168-1-150.thisisfourtyrandomcharacters012345678900.myunraid.net:4443, https://85-121-123-122.thisisfourtyrandomcharacters012345678900.myunraid.net:8443, https://10-252-0-1.hash.myunraid.net:4443, https://10-252-1-1.hash.myunraid.net:4443, https://10-253-3-1.hash.myunraid.net:4443, https://10-253-4-1.hash.myunraid.net:4443, https://10-253-5-1.hash.myunraid.net:4443, https://10-100-0-1.hash.myunraid.net:4443, https://10-100-0-2.hash.myunraid.net:4443, https://10-123-1-2.hash.myunraid.net:4443, https://221-123-121-112.hash.myunraid.net:4443, https://google.com, https://test.com, https://connect.myunraid.net, https://connect-staging.myunraid.net, https://dev-my.myunraid.net:4000, https://studio.apollographql.com"
|
||||
dynamicRemoteAccessType="DISABLED"
|
||||
[upc]
|
||||
apikey="unupc_fab6ff6ffe51040595c6d9ffb63a353ba16cc2ad7d93f813a2e80a5810"
|
||||
[connectionStatus]
|
||||
minigraph="PRE_INIT"
|
||||
minigraph="ERROR_RETRYING"
|
||||
|
||||
26
api/dev/states/nginx.ini
Normal file
@@ -0,0 +1,26 @@
NGINX_LANIP="192.168.1.150"
NGINX_LANIP6=""
NGINX_LANNAME="Tower"
NGINX_LANMDNS="Tower.local"
NGINX_CERTPATH="/boot/config/ssl/certs/certificate_bundle.pem"
NGINX_USESSL="yes"
NGINX_PORT="8080"
NGINX_PORTSSL="4443"
NGINX_DEFAULTURL="https://Tower.local:4443"
NGINX_CERTNAME="*.thisisfourtyrandomcharacters012345678900.myunraid.net"
NGINX_LANFQDN="192-168-1-150.thisisfourtyrandomcharacters012345678900.myunraid.net"
NGINX_LANFQDN6=""
NGINX_WANACCESS=""
NGINX_WANIP=""
NGINX_WANIP6=""
NGINX_WANFQDN="85-121-123-122.thisisfourtyrandomcharacters012345678900.myunraid.net"
NGINX_WANFQDN6=""
NGINX_WG0FQDN="10-252-0-1.hash.myunraid.net"
NGINX_WG1FQDN="10-252-1-1.hash.myunraid.net"
NGINX_WG3FQDN="10-253-3-1.hash.myunraid.net"
NGINX_WG4FQDN="10-253-4-1.hash.myunraid.net"
NGINX_WG55FQDN="10-253-5-1.hash.myunraid.net"
NGINX_TAILSCALEFQDN="10-100-0-1.hash.myunraid.net"
NGINX_TAILSCALE0FQDN="10-100-0-2.hash.myunraid.net"
NGINX_CUSTOMFQDN="10-123-1-2.hash.myunraid.net"
NGINX_CUSTOMFQDN6="221-123-121-112.hash.myunraid.net"
@@ -4,11 +4,9 @@ x-volumes: &volumes
|
||||
volumes:
|
||||
- ./dev:/app/dev
|
||||
- ./src:/app/src
|
||||
- ./patches:/app/patches
|
||||
- ./package.json:/app/package.json
|
||||
- ./package-lock.json:/app/package-lock.json
|
||||
- ./tsconfig.json:/app/tsconfig.json
|
||||
- ./tsup.config.ts:/app/tsup.config.ts
|
||||
- ./vite.config.ts:/app/vite.config.ts
|
||||
- ./dist/:/app/dist/
|
||||
- ./deploy/:/app/deploy/
|
||||
@@ -19,19 +17,15 @@ x-volumes: &volumes
|
||||
- ./.env.staging:/app/.env.staging
|
||||
- ./.env.test:/app/.env.test
|
||||
- ./.env.development:/app/.env.development
|
||||
- ./.pkg-cache:/app/.pkg-cache
|
||||
- ./codegen.yml:/app/codegen.yml
|
||||
- ./fix-array-type.cjs:/app/fix-array-type.cjs
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
- ./unraid-api.js:/app/unraid-api.js
|
||||
- ./ecosystem.config.json:/app/ecosystem.config.json
|
||||
|
||||
|
||||
networks:
|
||||
mothership_default:
|
||||
services:
|
||||
|
||||
dev:
|
||||
networks:
|
||||
- mothership_default
|
||||
image: unraid-api:development
|
||||
ports:
|
||||
- "3001:3001"
|
||||
@@ -51,8 +45,6 @@ services:
|
||||
- builder
|
||||
|
||||
local:
|
||||
networks:
|
||||
- mothership_default
|
||||
image: unraid-api:development
|
||||
ports:
|
||||
- "3001:3001"
|
||||
|
||||
116
api/docs/development.md
Normal file
@@ -0,0 +1,116 @@
# Development

## Installation

Manual install can be done with the following routes:
[production](https://stable.dl.unraid.net/unraid-api/dynamix.unraid.net.plg)
[staging](https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.staging.plg)

## Connecting to the API

### HTTP

This can be accessed by default via `http://tower.local/graphql`.

See <https://graphql.org/learn/serving-over-http/#http-methods-headers-and-body>
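As a rough sketch of querying that endpoint directly (the `x-api-key` header and the `welcome` query are taken from the Playground section further down; passing the header over plain HTTP like this is an assumption):

```bash
# POST a GraphQL query to the local endpoint per the GraphQL-over-HTTP convention.
# Replace the placeholder with a real API key (see the Playground section below).
curl -X POST http://tower.local/graphql \
  -H 'Content-Type: application/json' \
  -H 'x-api-key: __REPLACE_ME_WITH_API_KEY__' \
  -d '{"query": "query welcome { welcome { message } }"}'
```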
## Building in Docker

To get a development environment for testing, start by running these commands:

`npm run build:docker`
`npm run start:ddev`

which will give you an interactive shell inside the newly built Linux container.

To automatically build the plugin, run the command below:

`npm run build:docker`

The builder command builds the plugin into deploy/release, while the interactive shell lets you build the plugin or install node modules however you like.

## Logs

Logging can be configured via environment variables.

Log levels can be set when the api starts via `LOG_LEVEL=all/trace/debug/info/warn/error/fatal/mark/off`.

Additional detail for each log entry can be added with `LOG_CONTEXT=true` (warning: this generates a lot of data).

By default, logs are sent to syslog. Set `LOG_TRANSPORT=file` to have logs saved to `/var/log/unraid-api/stdout.log`, or enable debug mode to view logs inline.

Examples:

- `unraid-api start`
- `LOG_LEVEL=debug unraid-api start --debug`
- `LOG_LEVEL=trace LOG_CONTEXT=true LOG_TRANSPORT=file unraid-api start`

## Viewing data sent to mothership

If the environment variable `LOG_MOTHERSHIP_MESSAGES=true` is set, any data the unraid-api sends to mothership will be saved in clear text at `/var/log/unraid-api/relay-messages.log`.

Examples:

- `LOG_MOTHERSHIP_MESSAGES=true unraid-api start`
- `LOG_MOTHERSHIP_MESSAGES=true LOG_LEVEL=debug unraid-api start --debug`

## Debug Logging

To view debug logs, change the log level when starting the API, then run `unraid-api logs` to tail the logs.
Examples:

- `LOG_LEVEL=debug unraid-api start`
- `unraid-api logs`

## Switching between staging and production environments

1. Stop the api: `unraid-api stop`
2. Switch environments: `unraid-api switch-env`
3. Start the api: `unraid-api start`
4. Confirm the environment: `unraid-api report`

## Playground

The playground can be accessed via `http://tower.local/graphql` while in debug mode.
To get your API key open a terminal on your server and run `cat /boot/config/plugins/dynamix.my.servers/myservers.cfg | grep apikey=\"unraid | cut -d '"' -f2`. Add that API key in the "HTTP headers" panel of the playground.

```json
{
  "x-api-key": "__REPLACE_ME_WITH_API_KEY__"
}
```

Next add the query you want to run and hit the play icon.

```gql
query welcome {
  welcome {
    message
  }
}
```

You should get something like this back.

```json
{
  "data": {
    "welcome": {
      "message": "Welcome root to this Unraid 6.10.0 server"
    }
  }
}
```

Click the "Schema" and "Docs" buttons on the right side of the playground to learn more.

## Create a new release

To create a new version run `npm run release` and then run **ONLY** the `git push` section of the commands it returns.
To create a new prerelease run `npm run release -- --prerelease alpha`.

Pushing to this repo will cause an automatic "rolling" release to be built, which can be accessed via the page for the associated Github action run.

## Using a custom version (e.g. testing a new release)

Find the Pull Request you'd like to install, and a link will be present as a comment to install a PR-specific version.
18
api/ecosystem.config.json
Normal file
@@ -0,0 +1,18 @@
{
  "apps": [
    {
      "name": "unraid-api",
      "script": "npm",
      "args": "start",
      "cwd": "/usr/local/unraid-api",
      "log": "/var/log/unraid-api/unraid-api.log",
      "exec_mode": "fork",
      "ignore_watch": [
        "node_modules",
        "src",
        ".env.*",
        "myservers.cfg"
      ]
    }
  ]
}
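This is a PM2 process definition (PM2 appears in the package's dependencies). A minimal sketch of loading it by hand, assuming PM2 is available on the server's PATH and the package is installed at the `cwd` shown above:

```bash
# Start the app defined in the ecosystem file and follow its log output.
cd /usr/local/unraid-api
pm2 start ecosystem.config.json
pm2 logs unraid-api
```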
18570
api/package-lock.json
generated
File diff suppressed because it is too large
238
api/package.json
@@ -1,103 +1,86 @@
{
|
||||
"name": "@unraid/api",
|
||||
"version": "3.7.1",
|
||||
"main": "dist/index.js",
|
||||
"bin": "dist/unraid-api.cjs",
|
||||
"version": "3.11.0",
|
||||
"main": "src/cli/index.ts",
|
||||
"type": "module",
|
||||
"repository": "git@github.com:unraid/api.git",
|
||||
"author": "Alexis Tyler <xo@wvvw.me> (https://wvvw.me/)",
|
||||
"author": "Lime Technology, Inc. <unraid.net>",
|
||||
"license": "UNLICENSED",
|
||||
"engines": {
|
||||
"node": ">=16.5.0"
|
||||
},
|
||||
"pkg": {
|
||||
"assets": [
|
||||
"dist/index.cjs",
|
||||
"node_modules/@vmngr/libvirt/build/Release",
|
||||
"node_modules/ts-invariant/",
|
||||
"src/**/*.graphql"
|
||||
],
|
||||
"targets": [
|
||||
"node18-linux-x64"
|
||||
],
|
||||
"outputPath": "dist"
|
||||
},
|
||||
"scripts": {
|
||||
"compile": "tsup --config ./tsup.config.ts",
|
||||
"bundle": "pkg . --public",
|
||||
"build": "npm run compile && npm run bundle",
|
||||
"start": "node dist/main.js",
|
||||
"build:docker": "./scripts/dc.sh run --rm builder",
|
||||
"build-pkg": "./scripts/build.mjs",
|
||||
"build": "vite build --mode=production",
|
||||
"postbuild": "chmod +x dist/main.js && chmod +x dist/cli.js",
|
||||
"build-and-pack": "./scripts/build.mjs",
|
||||
"codegen": "MOTHERSHIP_GRAPHQL_LINK='https://staging.mothership.unraid.net/ws' graphql-codegen --config codegen.yml -r dotenv/config './.env.staging'",
|
||||
"codegen:watch": "DOTENV_CONFIG_PATH='./.env.staging' graphql-codegen-esm --config codegen.yml --watch -r dotenv/config",
|
||||
"codegen:watch": "DOTENV_CONFIG_PATH='./.env.staging' graphql-codegen --config codegen.yml --watch -r dotenv/config",
|
||||
"codegen:local": "NODE_TLS_REJECT_UNAUTHORIZED=0 MOTHERSHIP_GRAPHQL_LINK='https://mothership.localhost/ws' graphql-codegen-esm --config codegen.yml --watch",
|
||||
"tsc": "tsc --noEmit",
|
||||
"lint": "DEBUG=eslint:cli-engine eslint . --config .eslintrc.cjs",
|
||||
"lint:fix": "DEBUG=eslint:cli-engine eslint . --fix --config .eslintrc.cjs",
|
||||
"test:watch": "vitest --segfault-retry=3 --pool=forks",
|
||||
"test": "vitest run --segfault-retry=3 --pool=forks",
|
||||
"coverage": "vitest run --segfault-retry=3 --coverage",
|
||||
"patch:subscriptions-transport-ws": "node ./.scripts/patches/subscriptions-transport-ws.cjs",
|
||||
"lint": "eslint --flag unstable_ts_config --config .eslintrc.ts src/",
|
||||
"lint:fix": "eslint --flag unstable_ts_config --fix --config .eslintrc.ts src/",
|
||||
"test:watch": "vitest --pool=forks",
|
||||
"test": "vitest run --pool=forks",
|
||||
"coverage": "vitest run --pool=forks --coverage",
|
||||
"release": "standard-version",
|
||||
"typesync": "typesync",
|
||||
"install:unraid": "./scripts/install-in-unraid.sh",
|
||||
"start:plugin": "INTROSPECTION=true LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty LOG_LEVEL=trace unraid-api start --debug",
|
||||
"start:plugin-verbose": "LOG_CONTEXT=true LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty LOG_LEVEL=trace unraid-api start --debug",
|
||||
"start:dev": "LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty NODE_ENV=development LOG_LEVEL=trace NODE_ENV=development tsup --config ./tsup.config.ts --watch --onSuccess 'DOTENV_CONFIG_PATH=./.env.development node -r dotenv/config dist/unraid-api.cjs start --debug'",
|
||||
"restart:dev": "LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty NODE_ENV=development LOG_LEVEL=trace NODE_ENV=development tsup --config ./tsup.config.ts --watch --onSuccess 'DOTENV_CONFIG_PATH=./.env.development node -r dotenv/config dist/unraid-api.cjs restart --debug'",
|
||||
"stop:dev": "LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty NODE_ENV=development LOG_LEVEL=trace NODE_ENV=development tsup --config ./tsup.config.ts --onSuccess 'DOTENV_CONFIG_PATH=./.env.development node -r dotenv/config dist/unraid-api.cjs stop --debug'",
|
||||
"start:report": "LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty NODE_ENV=development LOG_LEVEL=trace NODE_ENV=development LOG_CONTEXT=true tsup --config ./tsup.config.ts --watch --onSuccess 'DOTENV_CONFIG_PATH=./.env.development node -r dotenv/config dist/unraid-api.cjs report --debug'",
|
||||
"build:dev": "./scripts/dc.sh build dev",
|
||||
"start:local": "./scripts/dc.sh run --rm --service-ports local",
|
||||
"start:ddev": "./scripts/dc.sh run --rm --service-ports dev",
|
||||
"start:dtest": "./scripts/dc.sh run --rm builder npm run test"
|
||||
"dev": "vite",
|
||||
"container:build": "./scripts/dc.sh build dev",
|
||||
"container:start": "./scripts/dc.sh run --rm --service-ports dev",
|
||||
"container:test": "./scripts/dc.sh run --rm builder npm run test",
|
||||
"container:enter": "./scripts/dc.sh exec dev /bin/bash"
|
||||
},
|
||||
"files": [
|
||||
".env.staging",
|
||||
".env.production",
|
||||
"dist",
|
||||
"unraid-api"
|
||||
"ecosystem.config.json",
|
||||
"README.md",
|
||||
"src",
|
||||
"node_modules/"
|
||||
],
|
||||
"bin": {
|
||||
"unraid-api": "dist/cli.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@apollo/client": "^3.10.4",
|
||||
"@apollo/server": "^4.10.4",
|
||||
"@apollo/client": "^3.11.8",
|
||||
"@apollo/server": "^4.11.2",
|
||||
"@as-integrations/fastify": "^2.1.1",
|
||||
"@graphql-codegen/client-preset": "^4.2.5",
|
||||
"@fastify/cookie": "^9.4.0",
|
||||
"@graphql-codegen/client-preset": "^4.5.0",
|
||||
"@graphql-tools/load-files": "^7.0.0",
|
||||
"@graphql-tools/merge": "^9.0.4",
|
||||
"@graphql-tools/schema": "^10.0.3",
|
||||
"@graphql-tools/utils": "^10.2.0",
|
||||
"@nestjs/apollo": "^12.1.0",
|
||||
"@nestjs/core": "^10.3.8",
|
||||
"@nestjs/graphql": "^12.1.1",
|
||||
"@graphql-tools/merge": "^9.0.8",
|
||||
"@graphql-tools/schema": "^10.0.7",
|
||||
"@graphql-tools/utils": "^10.5.5",
|
||||
"@nestjs/apollo": "^12.2.1",
|
||||
"@nestjs/core": "^10.4.7",
|
||||
"@nestjs/graphql": "^12.2.1",
|
||||
"@nestjs/passport": "^10.0.3",
|
||||
"@nestjs/platform-fastify": "^10.3.8",
|
||||
"@nestjs/schedule": "^4.0.2",
|
||||
"@reduxjs/toolkit": "^2.2.4",
|
||||
"@nestjs/platform-fastify": "^10.4.7",
|
||||
"@nestjs/schedule": "^4.1.1",
|
||||
"@reduxjs/toolkit": "^2.3.0",
|
||||
"@reflet/cron": "^1.3.1",
|
||||
"@runonflux/nat-upnp": "^1.0.2",
|
||||
"accesscontrol": "^2.2.1",
|
||||
"am": "github:unraid/am",
|
||||
"async-exit-hook": "^2.0.1",
|
||||
"btoa": "^1.2.1",
|
||||
"bycontract": "^2.0.11",
|
||||
"bytes": "^3.1.2",
|
||||
"cacheable-lookup": "^6.1.0",
|
||||
"cacheable-lookup": "^7.0.0",
|
||||
"camelcase-keys": "^9.1.3",
|
||||
"catch-exit": "^1.2.2",
|
||||
"chokidar": "^3.6.0",
|
||||
"class-transformer": "^0.5.1",
|
||||
"class-validator": "^0.14.1",
|
||||
"chokidar": "^4.0.1",
|
||||
"cli-table": "^0.3.11",
|
||||
"command-exists": "^1.2.9",
|
||||
"convert": "^4.14.1",
|
||||
"cors": "^2.8.5",
|
||||
"convert": "^5.5.1",
|
||||
"cross-fetch": "^4.0.0",
|
||||
"docker-event-emitter": "^0.3.0",
|
||||
"dockerode": "^3.3.5",
|
||||
"dotenv": "^16.4.5",
|
||||
"express": "^4.19.2",
|
||||
"find-process": "^1.4.7",
|
||||
"graphql": "^16.8.1",
|
||||
"execa": "^9.5.1",
|
||||
"exit-hook": "^4.0.0",
|
||||
"express": "^4.21.1",
|
||||
"filenamify": "^6.0.0",
|
||||
"fs-extra": "^11.2.0",
|
||||
"global-agent": "^3.0.0",
|
||||
"got": "^14.4.4",
|
||||
"graphql": "^16.9.0",
|
||||
"graphql-fields": "^2.0.3",
|
||||
"graphql-scalars": "^1.23.0",
|
||||
"graphql-subscriptions": "^2.0.0",
|
||||
@@ -105,112 +88,99 @@
|
||||
"graphql-type-json": "^0.3.2",
|
||||
"graphql-type-uuid": "^0.2.0",
|
||||
"graphql-ws": "^5.16.0",
|
||||
"htpasswd-js": "^1.0.2",
|
||||
"ini": "^4.1.2",
|
||||
"ip": "^2.0.1",
|
||||
"jose": "^5.3.0",
|
||||
"lodash": "^4.17.21",
|
||||
"multi-ini": "^2.2.0",
|
||||
"ip-regex": "^5.0.0",
|
||||
"jose": "^5.9.6",
|
||||
"lodash-es": "^4.17.21",
|
||||
"multi-ini": "^2.3.2",
|
||||
"mustache": "^4.2.0",
|
||||
"nanobus": "^4.5.0",
|
||||
"nest-access-control": "^3.1.0",
|
||||
"nestjs-pino": "^4.0.0",
|
||||
"nest-authz": "^2.11.0",
|
||||
"nestjs-pino": "^4.1.0",
|
||||
"node-cache": "^5.1.2",
|
||||
"node-window-polyfill": "^1.0.2",
|
||||
"openid-client": "^5.6.5",
|
||||
"p-iteration": "^1.1.8",
|
||||
"p-retry": "^4.6.2",
|
||||
"openid-client": "^6.1.3",
|
||||
"p-retry": "^6.2.0",
|
||||
"passport-custom": "^1.1.1",
|
||||
"passport-http-header-strategy": "^1.1.0",
|
||||
"path-type": "^6.0.0",
|
||||
"pidusage": "^3.0.2",
|
||||
"pino": "^9.1.0",
|
||||
"pino-http": "^9.0.0",
|
||||
"pino-pretty": "^11.0.0",
|
||||
"pino": "^9.5.0",
|
||||
"pino-http": "^10.3.0",
|
||||
"pino-pretty": "^11.3.0",
|
||||
"pm2": "^5.4.2",
|
||||
"reflect-metadata": "^0.1.14",
|
||||
"request": "^2.88.2",
|
||||
"semver": "^7.6.2",
|
||||
"semver": "^7.6.3",
|
||||
"stoppable": "^1.1.0",
|
||||
"systeminformation": "^5.22.9",
|
||||
"systeminformation": "^5.23.5",
|
||||
"ts-command-line-args": "^2.5.1",
|
||||
"uuid": "^9.0.1",
|
||||
"ws": "^8.17.0",
|
||||
"wtfnode": "^0.9.2",
|
||||
"uuid": "^11.0.2",
|
||||
"ws": "^8.18.0",
|
||||
"xhr2": "^0.2.1",
|
||||
"zod": "^3.23.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/runtime": "^7.24.5",
|
||||
"@graphql-codegen/add": "^5.0.2",
|
||||
"@graphql-codegen/cli": "^5.0.2",
|
||||
"@graphql-codegen/add": "^5.0.3",
|
||||
"@graphql-codegen/cli": "^5.0.3",
|
||||
"@graphql-codegen/fragment-matcher": "^5.0.2",
|
||||
"@graphql-codegen/import-types-preset": "^3.0.0",
|
||||
"@graphql-codegen/typed-document-node": "^5.0.6",
|
||||
"@graphql-codegen/typescript": "^4.0.6",
|
||||
"@graphql-codegen/typescript-operations": "^4.2.0",
|
||||
"@graphql-codegen/typescript-resolvers": "4.0.6",
|
||||
"@graphql-codegen/typed-document-node": "^5.0.11",
|
||||
"@graphql-codegen/typescript": "^4.1.1",
|
||||
"@graphql-codegen/typescript-operations": "^4.3.1",
|
||||
"@graphql-codegen/typescript-resolvers": "4.4.0",
|
||||
"@graphql-typed-document-node/core": "^3.2.0",
|
||||
"@nestjs/testing": "^10.3.8",
|
||||
"@swc/core": "^1.5.7",
|
||||
"@ianvs/prettier-plugin-sort-imports": "^4.4.0",
|
||||
"@nestjs/testing": "^10.4.7",
|
||||
"@originjs/vite-plugin-commonjs": "^1.0.3",
|
||||
"@rollup/plugin-node-resolve": "^15.3.0",
|
||||
"@types/async-exit-hook": "^2.0.2",
|
||||
"@types/btoa": "^1.2.5",
|
||||
"@types/bytes": "^3.1.4",
|
||||
"@types/cli-table": "^0.3.4",
|
||||
"@types/command-exists": "^1.2.3",
|
||||
"@types/dockerode": "^3.3.29",
|
||||
"@types/express": "^4.17.21",
|
||||
"@types/cors": "^2.8.17",
|
||||
"@types/dockerode": "^3.3.31",
|
||||
"@types/express": "^5.0.0",
|
||||
"@types/graphql-fields": "^1.3.9",
|
||||
"@types/graphql-type-uuid": "^0.2.6",
|
||||
"@types/ini": "^4.1.0",
|
||||
"@types/lodash": "^4.17.1",
|
||||
"@types/ini": "^4.1.1",
|
||||
"@types/ip": "^1.1.3",
|
||||
"@types/lodash": "^4.17.13",
|
||||
"@types/mustache": "^4.2.5",
|
||||
"@types/node": "^20.12.12",
|
||||
"@types/node": "^22.9.0",
|
||||
"@types/pidusage": "^2.0.5",
|
||||
"@types/pify": "^5.0.4",
|
||||
"@types/semver": "^7.5.8",
|
||||
"@types/sendmail": "^1.4.7",
|
||||
"@types/stoppable": "^1.1.3",
|
||||
"@types/uuid": "^9.0.8",
|
||||
"@types/ws": "^8.5.10",
|
||||
"@types/uuid": "^10.0.0",
|
||||
"@types/ws": "^8.5.13",
|
||||
"@types/wtfnode": "^0.7.3",
|
||||
"@typescript-eslint/eslint-plugin": "^7.9.0",
|
||||
"@typescript-eslint/parser": "^7.9.0",
|
||||
"@unraid/eslint-config": "github:unraid/eslint-config",
|
||||
"@vitest/coverage-v8": "^1.6.0",
|
||||
"@vitest/ui": "^1.6.0",
|
||||
"camelcase-keys": "^8.0.2",
|
||||
"@vitest/coverage-v8": "^2.1.4",
|
||||
"@vitest/ui": "^2.1.4",
|
||||
"cz-conventional-changelog": "3.3.0",
|
||||
"eslint": "^8.56.0",
|
||||
"eslint-import-resolver-typescript": "^3.6.1",
|
||||
"eslint-plugin-import": "^2.29.1",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"eslint-plugin-unicorn": "^53.0.0",
|
||||
"eslint-plugin-unused-imports": "^3.2.0",
|
||||
"execa": "^7.1.1",
|
||||
"filter-obj": "^5.1.0",
|
||||
"got": "^13",
|
||||
"graphql-codegen-typescript-validation-schema": "^0.14.1",
|
||||
"ip-regex": "^5.0.0",
|
||||
"json-difference": "^1.16.1",
|
||||
"map-obj": "^5.0.2",
|
||||
"p-props": "^5.0.0",
|
||||
"path-exists": "^5.0.0",
|
||||
"path-type": "^5.0.0",
|
||||
"pkg": "^5.8.1",
|
||||
"pretty-bytes": "^6.1.1",
|
||||
"pretty-ms": "^8.0.0",
|
||||
"eslint": "^9.14.0",
|
||||
"graphql-codegen-typescript-validation-schema": "^0.16.0",
|
||||
"jiti": "^2.4.0",
|
||||
"rollup-plugin-node-externals": "^7.1.3",
|
||||
"standard-version": "^9.5.0",
|
||||
"tsup": "^8.0.2",
|
||||
"typescript": "^5.4.5",
|
||||
"typesync": "^0.12.1",
|
||||
"vite-tsconfig-paths": "^4.3.2",
|
||||
"vitest": "^1.6.0",
|
||||
"zx": "^7.2.3"
|
||||
"typescript": "^5.6.3",
|
||||
"typescript-eslint": "^8.13.0",
|
||||
"vite": "^5.4.10",
|
||||
"vite-plugin-node": "^4.0.0",
|
||||
"vite-plugin-static-copy": "^2.0.0",
|
||||
"vite-tsconfig-paths": "^5.1.0",
|
||||
"vitest": "^2.1.4",
|
||||
"zx": "^8.2.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@vmngr/libvirt": "github:unraid/libvirt"
|
||||
},
|
||||
"config": {
|
||||
"commitizen": {
|
||||
"path": "./node_modules/cz-conventional-changelog"
|
||||
"overrides": {
|
||||
"eslint": {
|
||||
"jiti": "2"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,71 +2,83 @@
|
||||
import { exit } from 'process';
|
||||
import { cd, $ } from 'zx';
|
||||
|
||||
import getTags from './get-tags.mjs'
|
||||
import { getDeploymentVersion } from './get-deployment-version.mjs';
|
||||
|
||||
try {
|
||||
// Enable colours in output
|
||||
process.env.FORCE_COLOR = '1';
|
||||
// Enable colours in output
|
||||
process.env.FORCE_COLOR = '1';
|
||||
|
||||
// Ensure we have the correct working directory
|
||||
process.env.WORKDIR = process.env.WORKDIR ?? process.env.PWD;
|
||||
cd(process.env.WORKDIR);
|
||||
// Ensure we have the correct working directory
|
||||
process.env.WORKDIR ??= process.env.PWD;
|
||||
cd(process.env.WORKDIR);
|
||||
|
||||
// Clean up last deploy
|
||||
await $`rm -rf ./deploy/release`;
|
||||
await $`rm -rf ./deploy/pre-pack`;
|
||||
await $`mkdir -p ./deploy/release/`;
|
||||
await $`mkdir -p ./deploy/pre-pack/`;
|
||||
// Create deployment directories - ignore if they already exist
|
||||
await $`mkdir -p ./deploy/release`;
|
||||
await $`mkdir -p ./deploy/pre-pack`;
|
||||
|
||||
// Ensure all deps are installed
|
||||
await $`npm i`;
|
||||
await $`rm -rf ./deploy/release/*`;
|
||||
await $`rm -rf ./deploy/pre-pack/*`;
|
||||
|
||||
// Build Generated Types
|
||||
await $`npm run codegen`;
|
||||
// Build binary
|
||||
await $`npm run build`;
|
||||
// Build Generated Types
|
||||
await $`npm run codegen`;
|
||||
|
||||
// Copy binary + extra files to deployment directory
|
||||
await $`cp ./dist/api ./deploy/pre-pack/unraid-api`;
|
||||
await $`cp ./.env.production ./deploy/pre-pack/.env.production`;
|
||||
await $`cp ./.env.staging ./deploy/pre-pack/.env.staging`;
|
||||
await $`npm run build`;
|
||||
// Copy app files to plugin directory
|
||||
await $`cp -r ./src/ ./deploy/pre-pack/src/`;
|
||||
await $`cp -r ./dist/ ./deploy/pre-pack/dist/`;
|
||||
|
||||
// Get package details
|
||||
const { name, version } = await import('../package.json', {
|
||||
assert: { type: 'json' },
|
||||
}).then(pkg => pkg.default);
|
||||
// Copy environment to deployment directory
|
||||
const files = [
|
||||
'.env.production',
|
||||
'.env.staging',
|
||||
'tsconfig.json',
|
||||
'codegen.yml',
|
||||
'ecosystem.config.json'
|
||||
]
|
||||
|
||||
const tags = getTags(process.env);
|
||||
|
||||
// Decide whether to use full version or just tag
|
||||
const isTaggedRelease = tags.isTagged;
|
||||
const gitShaShort = tags.shortSha;
|
||||
|
||||
const deploymentVersion = isTaggedRelease ? version : `${version}+${gitShaShort}`;
|
||||
for (const file of files) {
|
||||
await $`cp ./${file} ./deploy/pre-pack/${file}`;
|
||||
}
|
||||
|
||||
// Create deployment package.json
|
||||
await $`echo ${JSON.stringify({ name, version: deploymentVersion })} > ./deploy/pre-pack/package.json`;
|
||||
// Get package details
|
||||
const { name, version, ...rest } = await import('../package.json', {
|
||||
assert: { type: 'json' },
|
||||
}).then((pkg) => pkg.default);
|
||||
|
||||
// # Create final tgz
|
||||
await $`cp ./README.md ./deploy/pre-pack/`;
|
||||
cd('./deploy/pre-pack');
|
||||
await $`npm pack`;
|
||||
const deploymentVersion = getDeploymentVersion(process.env, version);
|
||||
|
||||
// Move unraid-api.tgz to release directory
|
||||
await $`mv unraid-api-${deploymentVersion}.tgz ../release`;
|
||||
// Create deployment package.json
|
||||
await $`echo ${JSON.stringify({
|
||||
...rest,
|
||||
name,
|
||||
version: deploymentVersion,
|
||||
})} > ./deploy/pre-pack/package.json`;
|
||||
|
||||
// Set API_VERSION output based on this command
|
||||
await $`echo "::set-output name=API_VERSION::${deploymentVersion}"`;
|
||||
// # Create final tgz
|
||||
await $`cp ./README.md ./deploy/pre-pack/`;
|
||||
|
||||
await $`cp -r ./node_modules ./deploy/pre-pack/node_modules`;
|
||||
// Install production dependencies
|
||||
cd('./deploy/pre-pack');
|
||||
|
||||
await $`npm prune --omit=dev`;
|
||||
await $`npm install --omit=dev`;
|
||||
|
||||
// Now we'll pack everything in the pre-pack directory
|
||||
await $`tar -czf ../unraid-api-${deploymentVersion}.tgz .`;
|
||||
|
||||
// Move unraid-api.tgz to release directory
|
||||
await $`mv ../unraid-api-${deploymentVersion}.tgz ../release`;
|
||||
} catch (error) {
|
||||
// Error with a command
|
||||
if (Object.keys(error).includes('stderr')) {
|
||||
console.log(`Failed building package. Exit code: ${error.exitCode}`);
|
||||
console.log(`Error: ${error.stderr}`);
|
||||
} else {
|
||||
// Normal js error
|
||||
console.log('Failed building package.');
|
||||
console.log(`Error: ${error.message}`);
|
||||
}
|
||||
// Error with a command
|
||||
if (Object.keys(error).includes('stderr')) {
|
||||
console.log(`Failed building package. Exit code: ${error.exitCode}`);
|
||||
console.log(`Error: ${error.stderr}`);
|
||||
} else {
|
||||
// Normal js error
|
||||
console.log('Failed building package.');
|
||||
console.log(`Error: ${error.message}`);
|
||||
}
|
||||
|
||||
exit(error.exitCode);
|
||||
exit(error.exitCode);
|
||||
}
|
||||
|
||||
33
api/scripts/create-session.sh
Executable file
@@ -0,0 +1,33 @@
# This script creates a mock session on a server.
# During local dev/testing, you should run it in the api container,
# so the nest.js api can authenticate cookies against it.
#
# You should also set a cookie named 'unraid_...' whose value matches
# the name of the session you created (where name is sess_<name>).
# By default, this is my-session

sessions_dir=/var/lib/php
default_session_name=my-session

if [ "$1" = "--help" ]; then
    echo "This script creates a mock session on a server."
    echo ""
    echo "Usage: $0 [options]"
    echo ""
    echo "Options:"
    echo "  [name]     Name of the session to create (default: my-session)"
    echo "  --help     Display this help message and exit"
    echo ""
    echo "Example: $0 a-session-name"
    echo ""
    echo "Current list of sessions:"
    ls $sessions_dir
    exit 0
fi

session_name="${1:-$default_session_name}"

mkdir -p $sessions_dir
touch "$sessions_dir/sess_$session_name"

ls $sessions_dir
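A usage sketch, assuming the API is reachable at the dev URL from the docs above; the full cookie name is elided in the comment ('unraid_...'), so `<cookie-name>` below is a placeholder you would need to confirm:

```bash
# Inside the api container: create the mock PHP session file sess_my-session.
./scripts/create-session.sh my-session

# Send the session name as the cookie value when calling the API.
curl http://tower.local/graphql \
  -H 'Content-Type: application/json' \
  --cookie '<cookie-name>=my-session' \
  -d '{"query": "query welcome { welcome { message } }"}'
```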
@@ -1,4 +1,4 @@
#!/bin/sh

# Pass all entered params after the docker-compose call
GIT_SHA=$(git rev-parse --short HEAD) IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '') docker-compose -f docker-compose.yml "$@"
# Pass all entered params after the docker compose call
COMPOSE_PROJECT_NAME="connect" GIT_SHA=$(git rev-parse --short HEAD) IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '') docker compose -f docker-compose.yml "$@"
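For reference, this wrapper is normally reached through the npm scripts in package.json above (`container:build`, `container:start`, `container:test`); invoking it directly amounts to the same thing:

```bash
# Build the dev image, open an interactive dev shell, and run the test suite.
./scripts/dc.sh build dev
./scripts/dc.sh run --rm --service-ports dev
./scripts/dc.sh run --rm builder npm run test
```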
59
api/scripts/deploy-dev.sh
Executable file
@@ -0,0 +1,59 @@
#!/bin/bash
|
||||
|
||||
# Path to store the last used server name
|
||||
state_file="$HOME/.deploy_state"
|
||||
|
||||
# Read the last used server name from the state file
|
||||
if [[ -f "$state_file" ]]; then
|
||||
last_server_name=$(cat "$state_file")
|
||||
else
|
||||
last_server_name=""
|
||||
fi
|
||||
|
||||
# Read the server name from the command-line argument or use the last used server name as the default
|
||||
server_name="${1:-$last_server_name}"
|
||||
|
||||
# Check if the server name is provided
|
||||
if [[ -z "$server_name" ]]; then
|
||||
echo "Please provide the SSH server name."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Save the current server name to the state file
|
||||
echo "$server_name" > "$state_file"
|
||||
|
||||
# Source directory path
|
||||
source_directory="./src"
|
||||
|
||||
if [ ! -d "$source_directory" ]; then
|
||||
echo "The src directory does not exist."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Replace the value inside the rsync command with the user's input
|
||||
rsync_command="rsync -avz -e ssh $source_directory root@${server_name}:/usr/local/unraid-api"
|
||||
|
||||
echo "Executing the following command:"
|
||||
echo "$rsync_command"
|
||||
|
||||
# Execute the rsync command and capture the exit code
|
||||
eval "$rsync_command"
|
||||
exit_code=$?
|
||||
|
||||
# Run unraid-api restart on remote host
|
||||
ssh root@"${server_name}" "unraid-api restart"
|
||||
|
||||
# Play built-in sound based on the operating system
|
||||
if [[ "$OSTYPE" == "darwin"* ]]; then
|
||||
# macOS
|
||||
afplay /System/Library/Sounds/Glass.aiff
|
||||
elif [[ "$OSTYPE" == "linux-gnu" ]]; then
|
||||
# Linux
|
||||
paplay /usr/share/sounds/freedesktop/stereo/complete.oga
|
||||
elif [[ "$OSTYPE" == "msys" || "$OSTYPE" == "win32" ]]; then
|
||||
# Windows
|
||||
powershell.exe -c "(New-Object Media.SoundPlayer 'C:\Windows\Media\Windows Default.wav').PlaySync()"
|
||||
fi
|
||||
|
||||
# Exit with the rsync command's exit code
|
||||
exit $exit_code
|
||||
@@ -3,32 +3,27 @@ import { execSync } from 'child_process';
|
||||
const runCommand = (command) => {
|
||||
try {
|
||||
return execSync(command, { stdio: 'pipe' }).toString().trim();
|
||||
} catch(error) {
|
||||
} catch (error) {
|
||||
console.log('Failed to get value from tag command: ', command, error.message);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
const getTags = (env = process.env) => {
|
||||
|
||||
if (env.GIT_SHA) {
|
||||
console.log(`Using env vars for git tags: ${env.GIT_SHA} ${env.IS_TAGGED}`)
|
||||
return {
|
||||
shortSha: env.GIT_SHA,
|
||||
isTagged: Boolean(env.IS_TAGGED)
|
||||
}
|
||||
export const getDeploymentVersion = (env = process.env, packageVersion) => {
|
||||
if (env.API_VERSION) {
|
||||
console.log(`Using env var for version: ${env.API_VERSION}`);
|
||||
return env.API_VERSION;
|
||||
} else if (env.GIT_SHA && env.IS_TAGGED) {
|
||||
console.log(`Using env vars for git tags: ${env.GIT_SHA} ${env.IS_TAGGED}`);
|
||||
return env.IS_TAGGED ? packageVersion : `${packageVersion}+${env.GIT_SHA}`;
|
||||
} else {
|
||||
const gitShortSHA = runCommand('git rev-parse --short HEAD');
|
||||
const isCommitTagged = runCommand('git describe --tags --abbrev=0 --exact-match') !== undefined;
|
||||
console.log('gitShortSHA', gitShortSHA, 'isCommitTagged', isCommitTagged);
|
||||
if (!gitShortSHA) {
|
||||
throw new Error('Failing build due to missing SHA');
|
||||
}
|
||||
return {
|
||||
shortSha: gitShortSHA,
|
||||
isTagged: isCommitTagged
|
||||
console.error('Failed to get git short SHA');
|
||||
process.exit(1);
|
||||
}
|
||||
return isCommitTagged ? packageVersion : `${packageVersion}+${gitShortSHA}`;
|
||||
}
|
||||
}
|
||||
|
||||
export default getTags;
|
||||
};
|
||||
@@ -1,31 +0,0 @@
|
||||
import { beforeEach, expect, test, vi } from 'vitest';
|
||||
|
||||
// Preloading imports for faster tests
|
||||
import '@app/cli/commands/restart';
|
||||
import '@app/cli/commands/start';
|
||||
import '@app/cli/commands/stop';
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
});
|
||||
|
||||
test('calls stop and then start', async () => {
|
||||
vi.mock('@app/cli/commands/start');
|
||||
vi.mock('@app/cli/commands/stop');
|
||||
// Call restart
|
||||
const { restart } = await import('@app/cli/commands/restart');
|
||||
const { start } = await import('@app/cli/commands/start');
|
||||
const { stop } = await import('@app/cli/commands/stop');
|
||||
await restart();
|
||||
|
||||
// Check stop was called
|
||||
expect(vi.mocked(stop).mock.calls.length).toBe(1);
|
||||
|
||||
// Check start was called
|
||||
expect(vi.mocked(start).mock.calls.length).toBe(1);
|
||||
|
||||
// Check stop was called first
|
||||
expect(vi.mocked(stop).mock.invocationCallOrder[0]).toBeLessThan(
|
||||
vi.mocked(start).mock.invocationCallOrder[0]
|
||||
);
|
||||
});
|
||||
@@ -1,3 +1,4 @@
|
||||
import 'reflect-metadata';
|
||||
import { expect, test } from 'vitest';
|
||||
|
||||
// Preloading imports for faster tests
|
||||
@@ -6,17 +7,17 @@ import '@app/store/modules/emhttp';
|
||||
import '@app/store';
|
||||
|
||||
test('Returns allowed origins', async () => {
|
||||
const { store } = await import('@app/store');
|
||||
const { loadStateFiles } = await import('@app/store/modules/emhttp');
|
||||
const { getAllowedOrigins } = await import('@app/common/allowed-origins');
|
||||
const { loadConfigFile } = await import('@app/store/modules/config');
|
||||
const { store } = await import('@app/store');
|
||||
const { loadStateFiles } = await import('@app/store/modules/emhttp');
|
||||
const { getAllowedOrigins } = await import('@app/common/allowed-origins');
|
||||
const { loadConfigFile } = await import('@app/store/modules/config');
|
||||
|
||||
// Load state files into store
|
||||
await store.dispatch(loadStateFiles());
|
||||
await store.dispatch(loadConfigFile());
|
||||
// Load state files into store
|
||||
await store.dispatch(loadStateFiles());
|
||||
await store.dispatch(loadConfigFile());
|
||||
|
||||
// Get allowed origins
|
||||
expect(getAllowedOrigins()).toMatchInlineSnapshot(`
|
||||
// Get allowed origins
|
||||
expect(getAllowedOrigins()).toMatchInlineSnapshot(`
|
||||
[
|
||||
"/var/run/unraid-notifications.sock",
|
||||
"/var/run/unraid-php.sock",
|
||||
@@ -33,6 +34,10 @@ test('Returns allowed origins', async () => {
|
||||
"https://10-253-3-1.hash.myunraid.net:4443",
|
||||
"https://10-253-4-1.hash.myunraid.net:4443",
|
||||
"https://10-253-5-1.hash.myunraid.net:4443",
|
||||
"https://10-100-0-1.hash.myunraid.net:4443",
|
||||
"https://10-100-0-2.hash.myunraid.net:4443",
|
||||
"https://10-123-1-2.hash.myunraid.net:4443",
|
||||
"https://221-123-121-112.hash.myunraid.net:4443",
|
||||
"https://google.com",
|
||||
"https://test.com",
|
||||
"https://connect.myunraid.net",
|
||||
|
||||
@@ -1,109 +0,0 @@
|
||||
import { expect, test, vi } from 'vitest';
|
||||
import { store } from '@app/store';
|
||||
|
||||
import { loadStateFiles } from '@app/store/modules/emhttp';
|
||||
|
||||
vi.mock('@vmngr/libvirt', () => ({
|
||||
ConnectListAllDomainsFlags: {
|
||||
ACTIVE: 0,
|
||||
INACTIVE: 1,
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('@app/core/log', () => ({
|
||||
logger: {
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
trace: vi.fn(),
|
||||
},
|
||||
dashboardLogger: {
|
||||
info: vi.fn(),
|
||||
error: vi.fn((...input) => console.log(input)),
|
||||
debug: vi.fn(),
|
||||
trace: vi.fn(),
|
||||
},
|
||||
emhttpLogger: {
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
trace: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('@app/common/dashboard/boot-timestamp', () => ({
|
||||
bootTimestamp: new Date('2022-06-10T04:35:58.276Z'),
|
||||
}));
|
||||
|
||||
test('Returns generated data', async () => {
|
||||
await store.dispatch(loadStateFiles()).unwrap();
|
||||
|
||||
const { generateData } = await import('@app/common/dashboard/generate-data');
|
||||
const result = await generateData();
|
||||
|
||||
expect(result).toMatchInlineSnapshot(`
|
||||
{
|
||||
"apps": {
|
||||
"installed": 0,
|
||||
"started": 0,
|
||||
},
|
||||
"array": {
|
||||
"capacity": {
|
||||
"bytes": {
|
||||
"free": 19495825571000,
|
||||
"total": 41994745901000,
|
||||
"used": 22498920330000,
|
||||
},
|
||||
},
|
||||
"state": "STOPPED",
|
||||
},
|
||||
"config": {
|
||||
"valid": true,
|
||||
},
|
||||
"display": {
|
||||
"case": {
|
||||
"base64": "",
|
||||
"error": "",
|
||||
"icon": "",
|
||||
"url": "",
|
||||
},
|
||||
},
|
||||
"os": {
|
||||
"hostname": "Tower",
|
||||
"uptime": "2022-06-10T04:35:58.276Z",
|
||||
},
|
||||
"services": [
|
||||
{
|
||||
"name": "unraid-api",
|
||||
"online": true,
|
||||
"uptime": {
|
||||
"timestamp": "2022-06-10T04:35:58.276Z",
|
||||
},
|
||||
"version": "THIS_WILL_BE_REPLACED_WHEN_BUILT",
|
||||
},
|
||||
{
|
||||
"name": "dynamic-remote-access",
|
||||
"online": false,
|
||||
"uptime": {
|
||||
"timestamp": "2022-06-10T04:35:58.276Z",
|
||||
},
|
||||
"version": "DISABLED",
|
||||
},
|
||||
],
|
||||
"vars": {
|
||||
"flashGuid": "0000-0000-0000-000000000000",
|
||||
"regState": "PRO",
|
||||
"regTy": "PRO",
|
||||
"serverDescription": "Dev Server",
|
||||
"serverName": "Tower",
|
||||
},
|
||||
"versions": {
|
||||
"unraid": "6.11.2",
|
||||
},
|
||||
"vms": {
|
||||
"installed": 0,
|
||||
"started": 0,
|
||||
},
|
||||
}
|
||||
`);
|
||||
}, 10_000);
|
||||
@@ -23,6 +23,9 @@ RolesBuilder {
|
||||
],
|
||||
},
|
||||
"config": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
"update:own": [
|
||||
"*",
|
||||
],
|
||||
@@ -245,6 +248,24 @@ RolesBuilder {
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"config": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"connect": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"connect/dynamic-remote-access": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
"update:own": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"customizations": {
|
||||
"read:any": [
|
||||
"*",
|
||||
@@ -255,12 +276,22 @@ RolesBuilder {
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"display": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"docker": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"docker/container": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"docker/network": {
|
||||
"info": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
@@ -270,16 +301,31 @@ RolesBuilder {
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"network": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"notifications": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"services": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"unraid-version": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"vars": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"vms": {
|
||||
"read:any": [
|
||||
"*",
|
||||
@@ -366,6 +412,14 @@ RolesBuilder {
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"notifications": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
"update:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"os": {
|
||||
"read:any": [
|
||||
"*",
|
||||
|
||||
@@ -10,12 +10,14 @@ test('Creates an array event', async () => {
|
||||
);
|
||||
const { store } = await import('@app/store');
|
||||
const { loadStateFiles } = await import('@app/store/modules/emhttp');
|
||||
|
||||
const { loadConfigFile } = await import('@app/store/modules/config');
|
||||
// Load state files into store
|
||||
await store.dispatch(loadStateFiles());
|
||||
|
||||
await store.dispatch(loadConfigFile());
|
||||
|
||||
const arrayEvent = getArrayData(store.getState);
|
||||
expect(arrayEvent).toMatchInlineSnapshot(`
|
||||
expect(arrayEvent).toMatchObject(
|
||||
{
|
||||
"boot": {
|
||||
"comment": "Unraid OS boot device",
|
||||
@@ -177,6 +179,7 @@ test('Creates an array event', async () => {
|
||||
"warning": null,
|
||||
},
|
||||
],
|
||||
"id": expect.any(String),
|
||||
"parities": [
|
||||
{
|
||||
"comment": null,
|
||||
@@ -205,5 +208,5 @@ test('Creates an array event', async () => {
|
||||
],
|
||||
"state": "STOPPED",
|
||||
}
|
||||
`);
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import 'reflect-metadata';
|
||||
|
||||
import { expect, test } from 'vitest';
|
||||
import { setupPermissions } from '@app/core/permissions';
|
||||
|
||||
test('Returns default permissions', () => {
|
||||
expect(setupPermissions()).toMatchSnapshot();
|
||||
expect(setupPermissions()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
import 'reflect-metadata';
|
||||
|
||||
import { test, expect } from 'vitest';
|
||||
import { getWriteableConfig } from '@app/core/utils/files/config-file-normalizer';
|
||||
import { initialState } from '@app/store/modules/config';
|
||||
import { cloneDeep } from 'lodash';
|
||||
import { cloneDeep } from 'lodash-es';
|
||||
|
||||
test('it creates a FLASH config with NO OPTIONAL values', () => {
|
||||
const basicConfig = initialState;
|
||||
const config = getWriteableConfig(basicConfig, 'flash');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
const basicConfig = initialState;
|
||||
const config = getWriteableConfig(basicConfig, 'flash');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
{
|
||||
"api": {
|
||||
"extraOrigins": "",
|
||||
@@ -37,9 +39,9 @@ test('it creates a FLASH config with NO OPTIONAL values', () => {
|
||||
});
|
||||
|
||||
test('it creates a MEMORY config with NO OPTIONAL values', () => {
|
||||
const basicConfig = initialState;
|
||||
const config = getWriteableConfig(basicConfig, 'memory');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
const basicConfig = initialState;
|
||||
const config = getWriteableConfig(basicConfig, 'memory');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
{
|
||||
"api": {
|
||||
"extraOrigins": "",
|
||||
@@ -74,15 +76,15 @@ test('it creates a MEMORY config with NO OPTIONAL values', () => {
|
||||
});
|
||||
|
||||
test('it creates a FLASH config with OPTIONAL values', () => {
|
||||
const basicConfig = cloneDeep(initialState);
|
||||
basicConfig.remote['2Fa'] = 'yes';
|
||||
basicConfig.local['2Fa'] = 'yes';
|
||||
basicConfig.local.showT2Fa = 'yes';
|
||||
basicConfig.api.extraOrigins = 'myextra.origins';
|
||||
basicConfig.remote.upnpEnabled = 'yes';
|
||||
basicConfig.connectionStatus.upnpStatus = 'Turned On';
|
||||
const config = getWriteableConfig(basicConfig, 'flash');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
const basicConfig = cloneDeep(initialState);
|
||||
basicConfig.remote['2Fa'] = 'yes';
|
||||
basicConfig.local['2Fa'] = 'yes';
|
||||
basicConfig.local.showT2Fa = 'yes';
|
||||
basicConfig.api.extraOrigins = 'myextra.origins';
|
||||
basicConfig.remote.upnpEnabled = 'yes';
|
||||
basicConfig.connectionStatus.upnpStatus = 'Turned On';
|
||||
const config = getWriteableConfig(basicConfig, 'flash');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
{
|
||||
"api": {
|
||||
"extraOrigins": "myextra.origins",
|
||||
@@ -118,15 +120,15 @@ test('it creates a FLASH config with OPTIONAL values', () => {
|
||||
});
|
||||
|
||||
test('it creates a MEMORY config with OPTIONAL values', () => {
|
||||
const basicConfig = cloneDeep(initialState);
|
||||
basicConfig.remote['2Fa'] = 'yes';
|
||||
basicConfig.local['2Fa'] = 'yes';
|
||||
basicConfig.local.showT2Fa = 'yes';
|
||||
basicConfig.api.extraOrigins = 'myextra.origins';
|
||||
basicConfig.remote.upnpEnabled = 'yes';
|
||||
basicConfig.connectionStatus.upnpStatus = 'Turned On';
|
||||
const config = getWriteableConfig(basicConfig, 'memory');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
const basicConfig = cloneDeep(initialState);
|
||||
basicConfig.remote['2Fa'] = 'yes';
|
||||
basicConfig.local['2Fa'] = 'yes';
|
||||
basicConfig.local.showT2Fa = 'yes';
|
||||
basicConfig.api.extraOrigins = 'myextra.origins';
|
||||
basicConfig.remote.upnpEnabled = 'yes';
|
||||
basicConfig.connectionStatus.upnpStatus = 'Turned On';
|
||||
const config = getWriteableConfig(basicConfig, 'memory');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
{
|
||||
"api": {
|
||||
"extraOrigins": "myextra.origins",
|
||||
|
||||
@@ -4,32 +4,32 @@ import { safelySerializeObjectToIni } from '@app/core/utils/files/safe-ini-seria
import { Serializer } from 'multi-ini';

test('MultiIni breaks when serializing an object with a boolean inside', async () => {
    const objectToSerialize = {
        root: {
            anonMode: false,
        },
    };
    const serializer = new Serializer({ keep_quotes: false });
    expect(serializer.serialize(objectToSerialize)).toMatchInlineSnapshot(`
      "[root]
      anonMode=false
      "
    `);
});

test('MultiIni can safely serialize an object with a boolean inside', async () => {
    const objectToSerialize = {
        root: {
            anonMode: false,
        },
    };
    expect(safelySerializeObjectToIni(objectToSerialize)).toMatchInlineSnapshot(`
      "[root]
      anonMode="false"
      "
    `);
    const result = safelySerializeObjectToIni(objectToSerialize);
    expect(parse(result)).toMatchInlineSnapshot(`
      {
        "root": {
          "anonMode": false,
@@ -37,3 +37,33 @@ test('MultiIni can safely serialize an object with a boolean inside', async () =
    }
  `);
});

test.skip('Can serialize top-level fields', async () => {
    const objectToSerialize = {
        id: 'an-id',
        message: 'hello-world',
        number: 1,
        float: 1.1,
        flag: true,
        flag2: false,
        item: undefined,
        missing: null,
        empty: {},
    };

    const expected = `
      "id=an-id
      message=hello-world
      number=1
      float=1.1
      flag="true"
      flag2="false"
      [empty]
      "
    `
        .split('\n')
        .map((line) => line.trim())
        .join('\n');

    expect(safelySerializeObjectToIni(objectToSerialize)).toMatchInlineSnapshot(expected);
});

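For context (not part of the diff): the two tests above pin down why the wrapper exists — multi-ini writes a bare anonMode=false, which does not round-trip as a boolean, while safelySerializeObjectToIni writes it quoted. The following is a minimal stand-in sketch of that quoting behaviour only; it is an assumption for illustration, not the project's actual implementation.

// Illustrative only: quote booleans so they survive an INI round-trip.
const naiveSerializeToIni = (obj: Record<string, Record<string, unknown>>): string =>
    Object.entries(obj)
        .map(([section, values]) =>
            `[${section}]\n` +
            Object.entries(values)
                .map(([key, value]) =>
                    typeof value === 'boolean' ? `${key}="${value}"` : `${key}=${String(value)}`)
                .join('\n') + '\n')
        .join('\n');

// naiveSerializeToIni({ root: { anonMode: false } }) -> '[root]\nanonMode="false"\n'
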
@@ -1,3 +1,4 @@
import 'reflect-metadata';
import { checkMothershipAuthentication } from "@app/graphql/resolvers/query/cloud/check-mothership-authentication";
import { expect, test } from "vitest";
import packageJson from '@app/../package.json'

@@ -143,37 +143,57 @@ test('integration test, loading nginx ini and generating all URLs', async () =>
  },
  {
    "ipv4": "https://192-168-1-150.thisisfourtyrandomcharacters012345678900.myunraid.net:4443/",
    "name": "LAN FQDN",
    "name": "FQDN LAN",
    "type": "LAN",
  },
  {
    "ipv4": "https://85-121-123-122.thisisfourtyrandomcharacters012345678900.myunraid.net:8443/",
    "name": "WAN FQDN",
    "name": "FQDN WAN",
    "type": "WAN",
  },
  {
    "ipv4": "https://10-252-0-1.hash.myunraid.net:4443/",
    "name": "WG FQDN 0",
    "name": "FQDN WG 0",
    "type": "WIREGUARD",
  },
  {
    "ipv4": "https://10-252-1-1.hash.myunraid.net:4443/",
    "name": "WG FQDN 1",
    "name": "FQDN WG 1",
    "type": "WIREGUARD",
  },
  {
    "ipv4": "https://10-253-3-1.hash.myunraid.net:4443/",
    "name": "WG FQDN 3",
    "name": "FQDN WG 2",
    "type": "WIREGUARD",
  },
  {
    "ipv4": "https://10-253-4-1.hash.myunraid.net:4443/",
    "name": "WG FQDN 4",
    "name": "FQDN WG 3",
    "type": "WIREGUARD",
  },
  {
    "ipv4": "https://10-253-5-1.hash.myunraid.net:4443/",
    "name": "WG FQDN 55",
    "name": "FQDN WG 4",
    "type": "WIREGUARD",
  },
  {
    "ipv4": "https://10-100-0-1.hash.myunraid.net:4443/",
    "name": "FQDN TAILSCALE 0",
    "type": "WIREGUARD",
  },
  {
    "ipv4": "https://10-100-0-2.hash.myunraid.net:4443/",
    "name": "FQDN TAILSCALE 1",
    "type": "WIREGUARD",
  },
  {
    "ipv4": "https://10-123-1-2.hash.myunraid.net:4443/",
    "name": "FQDN CUSTOM 0",
    "type": "WIREGUARD",
  },
  {
    "ipv4": "https://221-123-121-112.hash.myunraid.net:4443/",
    "name": "FQDN CUSTOM 1",
    "type": "WIREGUARD",
  },
]
@@ -181,8 +201,6 @@ test('integration test, loading nginx ini and generating all URLs', async () =>
expect(urls.errors).toMatchInlineSnapshot(`
  [
    [Error: IP URL Resolver: Could not resolve any access URL for field: "lanIp6", is FQDN?: false],
    [Error: IP URL Resolver: Could not resolve any access URL for field: "lanFqdn6", is FQDN?: true],
    [Error: No URL Provided],
  ]
`);
});

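For context (not part of the diff): the renamed entries above all follow a "FQDN <interface> [<index>]" pattern, so the display name appears to be derived from the parsed FqdnEntry (interface plus optional id). A hypothetical helper illustrating that pattern — an assumption for illustration, not the project's actual code:

type FqdnEntryLike = { interface: string; id: number | null };

// 'LAN'/null -> 'FQDN LAN'; 'WG'/0 -> 'FQDN WG 0'; 'TAILSCALE'/1 -> 'FQDN TAILSCALE 1'
const fqdnDisplayName = (entry: FqdnEntryLike): string =>
    entry.id === null ? `FQDN ${entry.interface}` : `FQDN ${entry.interface} ${entry.id}`;
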
@@ -4,7 +4,6 @@ import * as apiKeyValidator from '@app/mothership/api-key/validate-api-key-with-
import { describe, expect, it, vi } from 'vitest';
import { type RecursivePartial } from '@app/types/index';
import { type RootState } from '@app/store/index';
import { logoutUser } from '@app/store/modules/config';

describe('apiKeyCheckJob Tests', () => {
    it('API Check Job (with success)', async () => {

6  api/src/__test__/setup/child-process-easy-to-kill.js  Executable file
@@ -0,0 +1,6 @@
/* eslint-disable */
process.title = 'unraid-api';

setInterval(() => {
    console.log('I NEED TO DIE');
}, 5_000);
10  api/src/__test__/setup/child-process-hard-to-kill.js  Normal file
@@ -0,0 +1,10 @@
/* eslint-disable */
process.title = 'unraid-api';
setInterval(() => {
    console.log('I NEED TO DIE (but i am very hard to kill)');
}, 5_000);

process.on('SIGTERM', () => {
    // Do nothing
    console.log('you cant kill me haha');
});
6  api/src/__test__/setup/env-setup.ts  Normal file
@@ -0,0 +1,6 @@
import { config } from 'dotenv';
config({
    path: './.env.test',
    debug: false,
    encoding: 'utf-8',
})
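For context (not part of the diff): a setup file like env-setup.ts is typically wired into Vitest through the test.setupFiles option. The exact config file location and path below are assumptions based only on the file added above:

// Hypothetical wiring, e.g. in api/vitest.config.ts
import { defineConfig } from 'vitest/config';

export default defineConfig({
    test: {
        // load .env.test before each test run (see env-setup.ts above)
        setupFiles: ['./src/__test__/setup/env-setup.ts'],
    },
});
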
@@ -6,7 +6,7 @@ exports[`loads notifications properly 1`] = `
  "description": "Canceled",
  "id": "/app/dev/notifications/unread/Unraid_Parity_check_1683971161.notify",
  "importance": "WARNING",
  "link": undefined,
  "link": "/",
  "subject": "Notice [UNRAID] - Parity check finished (0 errors)",
  "timestamp": "2023-05-13T09:46:01.000Z",
  "title": "Unraid Parity check",

@@ -104,10 +104,76 @@ test('After init returns values from cfg file for all fields', async () => {
|
||||
"certificateName": "*.thisisfourtyrandomcharacters012345678900.myunraid.net",
|
||||
"certificatePath": "/boot/config/ssl/certs/certificate_bundle.pem",
|
||||
"defaultUrl": "https://Tower.local:4443",
|
||||
"fqdnUrls": [
|
||||
{
|
||||
"fqdn": "192-168-1-150.thisisfourtyrandomcharacters012345678900.myunraid.net",
|
||||
"id": null,
|
||||
"interface": "LAN",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "85-121-123-122.thisisfourtyrandomcharacters012345678900.myunraid.net",
|
||||
"id": null,
|
||||
"interface": "WAN",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-252-0-1.hash.myunraid.net",
|
||||
"id": 0,
|
||||
"interface": "WG",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-252-1-1.hash.myunraid.net",
|
||||
"id": 1,
|
||||
"interface": "WG",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-253-3-1.hash.myunraid.net",
|
||||
"id": 2,
|
||||
"interface": "WG",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-253-4-1.hash.myunraid.net",
|
||||
"id": 3,
|
||||
"interface": "WG",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-253-5-1.hash.myunraid.net",
|
||||
"id": 4,
|
||||
"interface": "WG",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-100-0-1.hash.myunraid.net",
|
||||
"id": 0,
|
||||
"interface": "TAILSCALE",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-100-0-2.hash.myunraid.net",
|
||||
"id": 1,
|
||||
"interface": "TAILSCALE",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-123-1-2.hash.myunraid.net",
|
||||
"id": 0,
|
||||
"interface": "CUSTOM",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "221-123-121-112.hash.myunraid.net",
|
||||
"id": 1,
|
||||
"interface": "CUSTOM",
|
||||
"isIpv6": true,
|
||||
},
|
||||
],
|
||||
"httpPort": 8080,
|
||||
"httpsPort": 4443,
|
||||
"lanFqdn": "192-168-1-150.thisisfourtyrandomcharacters012345678900.myunraid.net",
|
||||
"lanFqdn6": "",
|
||||
"lanIp": "192.168.1.150",
|
||||
"lanIp6": "",
|
||||
"lanMdns": "Tower.local",
|
||||
@@ -115,31 +181,7 @@ test('After init returns values from cfg file for all fields', async () => {
|
||||
"sslEnabled": true,
|
||||
"sslMode": "yes",
|
||||
"wanAccessEnabled": false,
|
||||
"wanFqdn": "85-121-123-122.thisisfourtyrandomcharacters012345678900.myunraid.net",
|
||||
"wanFqdn6": "",
|
||||
"wanIp": "",
|
||||
"wgFqdns": [
|
||||
{
|
||||
"fqdn": "10-252-0-1.hash.myunraid.net",
|
||||
"id": 0,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-252-1-1.hash.myunraid.net",
|
||||
"id": 1,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-253-3-1.hash.myunraid.net",
|
||||
"id": 3,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-253-4-1.hash.myunraid.net",
|
||||
"id": 4,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-253-5-1.hash.myunraid.net",
|
||||
"id": 55,
|
||||
},
|
||||
],
|
||||
}
|
||||
`);
|
||||
expect(disks).toMatchInlineSnapshot(`
|
||||
|
||||
@@ -25,6 +25,7 @@ test('Returns paths', async () => {
|
||||
"machine-id",
|
||||
"log-base",
|
||||
"var-run",
|
||||
"auth-sessions",
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
@@ -0,0 +1,87 @@
|
||||
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
|
||||
|
||||
exports[`Returns parsed state file 1`] = `
|
||||
{
|
||||
"certificateName": "*.thisisfourtyrandomcharacters012345678900.myunraid.net",
|
||||
"certificatePath": "/boot/config/ssl/certs/certificate_bundle.pem",
|
||||
"defaultUrl": "https://Tower.local:4443",
|
||||
"fqdnUrls": [
|
||||
{
|
||||
"fqdn": "192-168-1-150.thisisfourtyrandomcharacters012345678900.myunraid.net",
|
||||
"id": null,
|
||||
"interface": "LAN",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "85-121-123-122.thisisfourtyrandomcharacters012345678900.myunraid.net",
|
||||
"id": null,
|
||||
"interface": "WAN",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-252-0-1.hash.myunraid.net",
|
||||
"id": 0,
|
||||
"interface": "WG",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-252-1-1.hash.myunraid.net",
|
||||
"id": 1,
|
||||
"interface": "WG",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-253-3-1.hash.myunraid.net",
|
||||
"id": 2,
|
||||
"interface": "WG",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-253-4-1.hash.myunraid.net",
|
||||
"id": 3,
|
||||
"interface": "WG",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-253-5-1.hash.myunraid.net",
|
||||
"id": 4,
|
||||
"interface": "WG",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-100-0-1.hash.myunraid.net",
|
||||
"id": 0,
|
||||
"interface": "TAILSCALE",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-100-0-2.hash.myunraid.net",
|
||||
"id": 1,
|
||||
"interface": "TAILSCALE",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "10-123-1-2.hash.myunraid.net",
|
||||
"id": 0,
|
||||
"interface": "CUSTOM",
|
||||
"isIpv6": false,
|
||||
},
|
||||
{
|
||||
"fqdn": "221-123-121-112.hash.myunraid.net",
|
||||
"id": 1,
|
||||
"interface": "CUSTOM",
|
||||
"isIpv6": true,
|
||||
},
|
||||
],
|
||||
"httpPort": 8080,
|
||||
"httpsPort": 4443,
|
||||
"lanIp": "192.168.1.150",
|
||||
"lanIp6": "",
|
||||
"lanMdns": "Tower.local",
|
||||
"lanName": "Tower",
|
||||
"sslEnabled": true,
|
||||
"sslMode": "yes",
|
||||
"wanAccessEnabled": false,
|
||||
"wanIp": "",
|
||||
}
|
||||
`;
|
||||
16  api/src/__test__/store/state-parsers/nginx.test.ts  Normal file
@@ -0,0 +1,16 @@
import { join } from 'path';
import { expect, test } from 'vitest';
import { store } from '@app/store';
import type { NginxIni } from '@app/store/state-parsers/nginx';

test('Returns parsed state file', async () => {
    const { parse } = await import('@app/store/state-parsers/nginx');
    const { parseConfig } = await import('@app/core/utils/misc/parse-config');
    const { paths } = store.getState();
    const filePath = join(paths.states, 'nginx.ini');
    const stateFile = parseConfig<NginxIni>({
        filePath,
        type: 'ini',
    });
    expect(parse(stateFile)).toMatchSnapshot();
});
@@ -1,11 +1,16 @@
import 'wtfnode';
#!/usr/bin/env node
import '@app/dotenv';

import { am } from 'am';
import { main } from '@app/cli/index';
import { internalLogger } from '@app/core/log';

void am(main, (error: unknown) => {
    internalLogger.fatal((error as Error).message);
    // Ensure process is exited
    process.exit(1);
});
try {
    await main();
} catch (error) {
    console.log(error);
    internalLogger.error({
        message: 'Failed to start unraid-api',
        error,
    });
    process.exit(1);
}

@@ -1,7 +1,7 @@
|
||||
import ipRegex from 'ip-regex';
|
||||
import readLine from 'readline';
|
||||
import { setEnv } from '@app/cli/set-env';
|
||||
import { getUnraidApiPid } from '@app/cli/get-unraid-api-pid';
|
||||
import { isUnraidApiRunning } from '@app/core/utils/pm2/unraid-api-running';
|
||||
import { cliLogger } from '@app/core/log';
|
||||
import { getters, store } from '@app/store';
|
||||
import { stdout } from 'process';
|
||||
@@ -13,21 +13,13 @@ import {
|
||||
type getServersQuery,
|
||||
type getCloudQuery,
|
||||
} from '../../graphql/generated/api/operations';
|
||||
import {
|
||||
type ApolloQueryResult,
|
||||
type ApolloClient,
|
||||
type NormalizedCacheObject,
|
||||
} from '@apollo/client/core/core.cjs';
|
||||
import { MinigraphStatus } from '@app/graphql/generated/api/types';
|
||||
import { API_VERSION } from '@app/environment';
|
||||
import { loadStateFiles } from '@app/store/modules/emhttp';
|
||||
import { ApolloClient, ApolloQueryResult, NormalizedCacheObject } from '@apollo/client/core/index.js';
|
||||
|
||||
type CloudQueryResult = NonNullable<
|
||||
ApolloQueryResult<getCloudQuery>['data']['cloud']
|
||||
>;
|
||||
type ServersQueryResultServer = NonNullable<
|
||||
ApolloQueryResult<getServersQuery>['data']['servers']
|
||||
>[0];
|
||||
type CloudQueryResult = NonNullable<ApolloQueryResult<getCloudQuery>['data']['cloud']>;
|
||||
type ServersQueryResultServer = NonNullable<ApolloQueryResult<getServersQuery>['data']['servers']>[0];
|
||||
|
||||
type Verbosity = '' | '-v' | '-vv';
|
||||
|
||||
@@ -132,8 +124,7 @@ export const getServersData = async ({
|
||||
const hashUrlRegex = () => /(.*)([a-z0-9]{40})(.*)/g;
|
||||
|
||||
export const anonymiseOrigins = (origins?: string[]): string[] => {
|
||||
const originsWithoutSocks =
|
||||
origins?.filter((url) => !url.endsWith('.sock')) ?? [];
|
||||
const originsWithoutSocks = origins?.filter((url) => !url.endsWith('.sock')) ?? [];
|
||||
return originsWithoutSocks
|
||||
.map((origin) =>
|
||||
origin
|
||||
@@ -142,29 +133,17 @@ export const anonymiseOrigins = (origins?: string[]): string[] => {
|
||||
// Replace ipv4 address using . separator with "IPV4ADDRESS"
|
||||
.replace(ipRegex(), 'IPV4ADDRESS')
|
||||
// Replace ipv4 address using - separator with "IPV4ADDRESS"
|
||||
.replace(
|
||||
new RegExp(ipRegex().toString().replace('\\.', '-')),
|
||||
'/IPV4ADDRESS'
|
||||
)
|
||||
.replace(new RegExp(ipRegex().toString().replace('\\.', '-')), '/IPV4ADDRESS')
|
||||
// Report WAN port
|
||||
.replace(
|
||||
`:${getters.config().remote.wanport || 443}`,
|
||||
':WANPORT'
|
||||
)
|
||||
.replace(`:${getters.config().remote.wanport || 443}`, ':WANPORT')
|
||||
)
|
||||
.filter(Boolean);
|
||||
};
|
||||
|
||||
const getAllowedOrigins = (
|
||||
cloud: CloudQueryResult | null,
|
||||
v: Verbosity
|
||||
): string[] | null => {
|
||||
const getAllowedOrigins = (cloud: CloudQueryResult | null, v: Verbosity): string[] | null => {
|
||||
switch (v) {
|
||||
case '-vv':
|
||||
return (
|
||||
cloud?.allowedOrigins.filter((url) => !url.endsWith('.sock')) ??
|
||||
[]
|
||||
);
|
||||
return cloud?.allowedOrigins.filter((url) => !url.endsWith('.sock')) ?? [];
|
||||
case '-v':
|
||||
return anonymiseOrigins(cloud?.allowedOrigins ?? []);
|
||||
default:
|
||||
@@ -172,37 +151,23 @@ const getAllowedOrigins = (
|
||||
}
|
||||
};
|
||||
|
||||
const getReadableCloudDetails = (
|
||||
reportObject: ReportObject,
|
||||
v: Verbosity
|
||||
): string => {
|
||||
const error = reportObject.cloud.error
|
||||
? `\n ERROR [${reportObject.cloud.error}]`
|
||||
: '';
|
||||
const status = reportObject.cloud.status
|
||||
? reportObject.cloud.status
|
||||
: 'disconnected';
|
||||
const ip =
|
||||
reportObject.cloud.ip && v !== ''
|
||||
? `\n IP: [${reportObject.cloud.ip}]`
|
||||
: '';
|
||||
const getReadableCloudDetails = (reportObject: ReportObject, v: Verbosity): string => {
|
||||
const error = reportObject.cloud.error ? `\n ERROR [${reportObject.cloud.error}]` : '';
|
||||
const status = reportObject.cloud.status ? reportObject.cloud.status : 'disconnected';
|
||||
const ip = reportObject.cloud.ip && v !== '' ? `\n IP: [${reportObject.cloud.ip}]` : '';
|
||||
return `
|
||||
STATUS: [${status}] ${ip} ${error}`;
|
||||
};
|
||||
|
||||
const getReadableMinigraphDetails = (reportObject: ReportObject): string => {
|
||||
const statusLine = `STATUS: [${reportObject.minigraph.status}]`;
|
||||
const errorLine = reportObject.minigraph.error
|
||||
? ` ERROR: [${reportObject.minigraph.error}]`
|
||||
: null;
|
||||
const errorLine = reportObject.minigraph.error ? ` ERROR: [${reportObject.minigraph.error}]` : null;
|
||||
const timeoutLine = reportObject.minigraph.timeout
|
||||
? ` TIMEOUT: [${(reportObject.minigraph.timeout || 1) / 1_000}s]`
|
||||
: null; // 1 in case of divide by zero
|
||||
|
||||
return `
|
||||
${statusLine}${errorLine ? `\n${errorLine}` : ''}${
|
||||
timeoutLine ? `\n${timeoutLine}` : ''
|
||||
}`;
|
||||
${statusLine}${errorLine ? `\n${errorLine}` : ''}${timeoutLine ? `\n${timeoutLine}` : ''}`;
|
||||
};
|
||||
|
||||
// Convert server to string output
|
||||
@@ -215,10 +180,7 @@ const serverToString = (v: Verbosity) => (server: ServersQueryResultServer) =>
|
||||
: ''
|
||||
}`;
|
||||
|
||||
const getReadableServerDetails = (
|
||||
reportObject: ReportObject,
|
||||
v: Verbosity
|
||||
): string => {
|
||||
const getReadableServerDetails = (reportObject: ReportObject, v: Verbosity): string => {
|
||||
if (!reportObject.servers) {
|
||||
return '';
|
||||
}
|
||||
@@ -236,9 +198,7 @@ const getReadableServerDetails = (
|
||||
return `
|
||||
SERVERS:
|
||||
ONLINE: ${reportObject.servers.online.map(serverToString(v)).join(',')}
|
||||
OFFLINE: ${reportObject.servers.offline
|
||||
.map(serverToString(v))
|
||||
.join(',')}${invalid}`;
|
||||
OFFLINE: ${reportObject.servers.offline.map(serverToString(v)).join(',')}${invalid}`;
|
||||
};
|
||||
|
||||
const getReadableAllowedOrigins = (reportObject: ReportObject): string => {
|
||||
@@ -263,7 +223,6 @@ const getVerbosity = (argv: string[]): Verbosity => {
|
||||
return '';
|
||||
};
|
||||
|
||||
// eslint-disable-next-line complexity
|
||||
export const report = async (...argv: string[]) => {
|
||||
// Check if the user has raw output enabled
|
||||
const rawOutput = argv.includes('--raw');
|
||||
@@ -295,7 +254,7 @@ export const report = async (...argv: string[]) => {
|
||||
const v = getVerbosity(argv);
|
||||
|
||||
// Find all processes called "unraid-api" which aren't this process
|
||||
const unraidApiPid = await getUnraidApiPid();
|
||||
const unraidApiRunning = await isUnraidApiRunning();
|
||||
|
||||
// Load my servers config file into store
|
||||
await store.dispatch(loadConfigFile());
|
||||
@@ -321,43 +280,37 @@ export const report = async (...argv: string[]) => {
|
||||
const reportObject: ReportObject = {
|
||||
os: {
|
||||
serverName: emhttp.var.name,
|
||||
version: emhttp.var.version
|
||||
version: emhttp.var.version,
|
||||
},
|
||||
api: {
|
||||
version: API_VERSION,
|
||||
status: unraidApiPid ? 'running' : 'stopped',
|
||||
environment:
|
||||
process.env.ENVIRONMENT ??
|
||||
'THIS_WILL_BE_REPLACED_WHEN_BUILT',
|
||||
status: unraidApiRunning ? 'running' : 'stopped',
|
||||
environment: process.env.ENVIRONMENT ?? 'THIS_WILL_BE_REPLACED_WHEN_BUILT',
|
||||
nodeVersion: process.version,
|
||||
},
|
||||
apiKey: isApiKeyValid ? 'valid' : cloud?.apiKey.error ?? 'invalid',
|
||||
...(servers ? { servers } : {}),
|
||||
myServers: {
|
||||
status: config?.remote?.username
|
||||
? 'authenticated'
|
||||
: 'signed out',
|
||||
status: config?.remote?.username ? 'authenticated' : 'signed out',
|
||||
...(config?.remote?.username
|
||||
? { myServersUsername: config?.remote?.username?.includes('@') ? 'REDACTED' : config?.remote.username }
|
||||
? {
|
||||
myServersUsername: config?.remote?.username?.includes('@')
|
||||
? 'REDACTED'
|
||||
: config?.remote.username,
|
||||
}
|
||||
: {}),
|
||||
},
|
||||
minigraph: {
|
||||
status: cloud?.minigraphql.status ?? MinigraphStatus.PRE_INIT,
|
||||
timeout: cloud?.minigraphql.timeout ?? null,
|
||||
error:
|
||||
cloud?.minigraphql.error ?? !cloud?.minigraphql.status
|
||||
? 'API Disconnected'
|
||||
: null,
|
||||
cloud?.minigraphql.error ?? !cloud?.minigraphql.status ? 'API Disconnected' : null,
|
||||
},
|
||||
cloud: {
|
||||
status: cloud?.cloud.status ?? 'error',
|
||||
...(cloud?.cloud.error ? { error: cloud.cloud.error } : {}),
|
||||
...(cloud?.cloud.status === 'ok'
|
||||
? { ip: cloud.cloud.ip ?? 'NO_IP' }
|
||||
: {}),
|
||||
...(getAllowedOrigins(cloud, v)
|
||||
? { allowedOrigins: getAllowedOrigins(cloud, v) }
|
||||
: {}),
|
||||
...(cloud?.cloud.status === 'ok' ? { ip: cloud.cloud.ip ?? 'NO_IP' } : {}),
|
||||
...(getAllowedOrigins(cloud, v) ? { allowedOrigins: getAllowedOrigins(cloud, v) } : {}),
|
||||
},
|
||||
};
|
||||
|
||||
@@ -370,8 +323,8 @@ export const report = async (...argv: string[]) => {
|
||||
|
||||
if (jsonReport) {
|
||||
stdout.write(JSON.stringify(reportObject) + '\n');
|
||||
stdoutLogger.close();
|
||||
return reportObject;
|
||||
stdoutLogger.close();
|
||||
return reportObject;
|
||||
} else {
|
||||
// Generate the actual report
|
||||
const report = `
|
||||
@@ -388,9 +341,7 @@ MY_SERVERS: ${reportObject.myServers.status}${
|
||||
: ''
|
||||
}
|
||||
CLOUD: ${getReadableCloudDetails(reportObject, v)}
|
||||
MINI-GRAPH: ${getReadableMinigraphDetails(
|
||||
reportObject
|
||||
)}${getReadableServerDetails(
|
||||
MINI-GRAPH: ${getReadableMinigraphDetails(reportObject)}${getReadableServerDetails(
|
||||
reportObject,
|
||||
v
|
||||
)}${getReadableAllowedOrigins(reportObject)}
|
||||
@@ -405,9 +356,7 @@ MINI-GRAPH: ${getReadableMinigraphDetails(
|
||||
console.log({ error });
|
||||
if (error instanceof Error) {
|
||||
cliLogger.trace(error);
|
||||
stdoutLogger.write(
|
||||
`\nFailed generating report with "${error.message}"\n`
|
||||
);
|
||||
stdoutLogger.write(`\nFailed generating report with "${error.message}"\n`);
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,88 +1,16 @@
|
||||
import { spawn } from 'child_process';
|
||||
import { addExitCallback } from 'catch-exit';
|
||||
import { PM2_PATH } from '@app/consts';
|
||||
import { cliLogger } from '@app/core/log';
|
||||
import { mainOptions } from '@app/cli/options';
|
||||
import { logToSyslog } from '@app/cli/log-to-syslog';
|
||||
import { getters } from '@app/store';
|
||||
import { getAllUnraidApiPids } from '@app/cli/get-unraid-api-pid';
|
||||
import { API_VERSION } from '@app/environment';
|
||||
|
||||
import { execSync } from 'child_process';
|
||||
import { join } from 'node:path';
|
||||
/**
|
||||
* Start a new API process.
|
||||
*/
|
||||
export const start = async () => {
|
||||
// Set process title
|
||||
cliLogger.info('Starting unraid-api with command', `${PM2_PATH} start ${join(import.meta.dirname, 'ecosystem.config.json')} --update-env`);
|
||||
|
||||
process.title = 'unraid-api';
|
||||
const runningProcesses = await getAllUnraidApiPids();
|
||||
if (runningProcesses.length > 0) {
|
||||
cliLogger.info('unraid-api is Already Running!');
|
||||
cliLogger.info('Run "unraid-api restart" to stop all running processes and restart');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Start API
|
||||
cliLogger.info('Starting unraid-api@v%s', API_VERSION);
|
||||
|
||||
// If we're in debug mode or we're NOT
|
||||
// in debug but ARE in the child process
|
||||
if (mainOptions.debug || process.env._DAEMONIZE_PROCESS) {
|
||||
// Log when the API exits
|
||||
addExitCallback((signal, exitCode, error) => {
|
||||
if (exitCode === 0 || exitCode === 130 || signal === 'SIGTERM') {
|
||||
logToSyslog('👋 Farewell. UNRAID API shutting down!');
|
||||
return;
|
||||
}
|
||||
|
||||
// Log when the API crashes
|
||||
if (signal === 'uncaughtException' && error) {
|
||||
logToSyslog(`⚠️ Caught exception: ${error.message}`);
|
||||
}
|
||||
|
||||
// Log when we crash
|
||||
if (exitCode) {
|
||||
logToSyslog(`⚠️ UNRAID API crashed with exit code ${exitCode}`);
|
||||
return;
|
||||
}
|
||||
|
||||
logToSyslog('🛑 UNRAID API crashed without an exit code?');
|
||||
});
|
||||
|
||||
logToSyslog('✔️ UNRAID API started successfully!');
|
||||
}
|
||||
|
||||
// Load bundled index file
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
require('../../index');
|
||||
|
||||
if (!mainOptions.debug) {
|
||||
if ('_DAEMONIZE_PROCESS' in process.env) {
|
||||
// In the child, clean up the tracking environment variable
|
||||
delete process.env._DAEMONIZE_PROCESS;
|
||||
} else {
|
||||
cliLogger.debug('Daemonizing process. %s %o', process.execPath, process.argv);
|
||||
|
||||
// Spawn child
|
||||
// First arg is path (inside PKG), second arg is restart, stop, etc, rest is args to main argument
|
||||
const [path, , ...rest] = process.argv.slice(1);
|
||||
const replacedCommand = [path, 'start', ...rest];
|
||||
const child = spawn(process.execPath, replacedCommand, {
|
||||
// In the parent set the tracking environment variable
|
||||
env: Object.assign(process.env, { _DAEMONIZE_PROCESS: '1' }),
|
||||
// The process MUST have it's cwd set to the
|
||||
// path where it resides within the Nexe VFS
|
||||
cwd: getters.paths()['unraid-api-base'],
|
||||
stdio: 'ignore',
|
||||
detached: true,
|
||||
});
|
||||
|
||||
// Convert process into daemon
|
||||
child.unref();
|
||||
|
||||
cliLogger.debug('Daemonized successfully!');
|
||||
|
||||
// Exit cleanly
|
||||
process.exit(0);
|
||||
}
|
||||
}
|
||||
execSync(`${PM2_PATH} start ${join(import.meta.dirname, '../../', 'ecosystem.config.json')} --update-env`, {
|
||||
env: process.env,
|
||||
stdio: 'inherit',
|
||||
cwd: process.cwd()
|
||||
});
|
||||
};
|
||||
|
||||
@@ -1,19 +1,7 @@
import prettyMs from 'pretty-ms';
import pidUsage from 'pidusage';
import { cliLogger } from '@app/core/log';
import { getUnraidApiPid } from '@app/cli/get-unraid-api-pid';
import { setEnv } from '@app/cli/set-env';
import { PM2_PATH } from '@app/consts';
import { execSync } from 'child_process';

export const status = async () => {
    setEnv('LOG_TYPE', 'raw');

    // Find all processes called "unraid-api" which aren't this process
    const unraidApiPid = await getUnraidApiPid();
    if (!unraidApiPid) {
        cliLogger.info('Found no running processes.');
        return;
    }

    const stats = await pidUsage(unraidApiPid);
    cliLogger.info(`API has been running for ${prettyMs(stats.elapsed)} and is in "${process.env.ENVIRONMENT ?? 'ERR: Unknown Environment'}" mode!`);
    execSync(`${PM2_PATH} status unraid-api`, { stdio: 'inherit' });
    process.exit(0);
};

@@ -1,44 +1,6 @@
|
||||
import { cliLogger } from '@app/core/log';
|
||||
import { getAllUnraidApiPids } from '@app/cli/get-unraid-api-pid';
|
||||
import { sleep } from '@app/core/utils/misc/sleep';
|
||||
import pRetry from 'p-retry';
|
||||
|
||||
/**
|
||||
* Stop a running API process.
|
||||
*/
|
||||
import { PM2_PATH } from '@app/consts';
|
||||
import { execSync } from 'child_process';
|
||||
|
||||
export const stop = async () => {
|
||||
try {
|
||||
await pRetry(async (attempts) => {
|
||||
const runningApis = await getAllUnraidApiPids();
|
||||
|
||||
if (runningApis.length > 0) {
|
||||
cliLogger.info('Stopping %s unraid-api process(es)...', runningApis.length);
|
||||
runningApis.forEach(pid => process.kill(pid, 'SIGTERM'));
|
||||
await sleep(50);
|
||||
const newPids = await getAllUnraidApiPids();
|
||||
|
||||
if (newPids.length > 0) {
|
||||
throw new Error('Not all processes have exited yet');
|
||||
}
|
||||
} else if (attempts < 1) {
|
||||
cliLogger.info('Found no running processes.');
|
||||
}
|
||||
|
||||
return true;
|
||||
}, {
|
||||
retries: 2,
|
||||
minTimeout: 1_000,
|
||||
factor: 1,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
cliLogger.info('Process did not exit cleanly, forcing shutdown', error);
|
||||
const processes = await getAllUnraidApiPids();
|
||||
for (const pid of processes) {
|
||||
process.kill(pid, 'SIGKILL');
|
||||
await sleep(100);
|
||||
}
|
||||
}
|
||||
|
||||
await sleep(500);
|
||||
execSync(`${PM2_PATH} stop unraid-api`, { stdio: 'inherit' });
|
||||
};
|
||||
|
||||
@@ -1,27 +1,17 @@
import { copyFile, readFile, writeFile } from 'fs/promises';
import { join } from 'path';
import { cliLogger } from '@app/core/log';
import { getUnraidApiPid } from '@app/cli/get-unraid-api-pid';
import { setEnv } from '@app/cli/set-env';
import { getters } from '@app/store';
import { start } from '@app/cli/commands/start';
import { stop } from '@app/cli/commands/stop';

export const switchEnv = async () => {
    setEnv('LOG_TYPE', 'raw');

    const paths = getters.paths();
    const basePath = paths['unraid-api-base'];
    const envFlashFilePath = paths['myservers-env'];
    const envFile = await readFile(envFlashFilePath, 'utf-8').catch(() => '');

    let shouldStartAfterRunning = false;
    if (await getUnraidApiPid()) {
        cliLogger.info('unraid-api is running, stopping...');
        // Stop Running Process
        await stop();
        shouldStartAfterRunning = true;
    }
    await stop();

    cliLogger.debug(
        'Checking %s for current ENV, found %s',
@@ -70,11 +60,5 @@ export const switchEnv = async () => {
    await copyFile(source, destination);

    cliLogger.info('Now using %s', newEnv);
    if (shouldStartAfterRunning) {
        cliLogger.debug('Restarting unraid-api');
        // Start Process
        await start();
    } else {
        cliLogger.info('Run "unraid-api start" to start the API.');
    }
    await start();
};

43  api/src/cli/index.ts  Normal file → Executable file
@@ -2,37 +2,18 @@ import { parse } from 'ts-command-line-args';
|
||||
import { cliLogger } from '@app/core/log';
|
||||
import { type Flags, mainOptions, options, args } from '@app/cli/options';
|
||||
import { setEnv } from '@app/cli/set-env';
|
||||
import { env } from '@app/dotenv';
|
||||
import { getters } from '@app/store';
|
||||
import { execSync } from 'child_process';
|
||||
import { PM2_PATH } from '@app/consts';
|
||||
import * as ENVIRONMENT from '@app/environment';
|
||||
|
||||
const command = mainOptions.command as unknown as string;
|
||||
|
||||
export const main = async (...argv: string[]) => {
|
||||
cliLogger.debug(env, 'Loading env file');
|
||||
|
||||
// Set envs
|
||||
setEnv('LOG_TYPE', 'pretty');
|
||||
cliLogger.debug({ paths: getters.paths() }, 'Starting CLI');
|
||||
cliLogger.debug({ paths: getters.paths(), environment: ENVIRONMENT }, 'Starting CLI');
|
||||
|
||||
setEnv('DEBUG', mainOptions.debug ?? false);
|
||||
setEnv('ENVIRONMENT', process.env.ENVIRONMENT ?? 'production');
|
||||
setEnv('PORT', process.env.PORT ?? mainOptions.port ?? '9000');
|
||||
setEnv(
|
||||
'LOG_LEVEL',
|
||||
process.env.LOG_LEVEL ?? mainOptions['log-level'] ?? 'INFO'
|
||||
);
|
||||
if (!process.env.LOG_TRANSPORT) {
|
||||
if (process.env.ENVIRONMENT === 'production' && !mainOptions.debug) {
|
||||
setEnv('LOG_TRANSPORT', 'file');
|
||||
setEnv('LOG_LEVEL', 'INFO');
|
||||
} else if (!mainOptions.debug) {
|
||||
// Staging Environment, backgrounded plugin
|
||||
setEnv('LOG_TRANSPORT', 'file');
|
||||
setEnv('LOG_LEVEL', 'TRACE');
|
||||
} else {
|
||||
cliLogger.debug('In Debug Mode - Log Level Defaulting to: stdout');
|
||||
}
|
||||
}
|
||||
|
||||
if (!command) {
|
||||
// Run help command
|
||||
@@ -49,15 +30,12 @@ export const main = async (...argv: string[]) => {
|
||||
start: import('@app/cli/commands/start').then((pkg) => pkg.start),
|
||||
stop: import('@app/cli/commands/stop').then((pkg) => pkg.stop),
|
||||
restart: import('@app/cli/commands/restart').then((pkg) => pkg.restart),
|
||||
'switch-env': import('@app/cli/commands/switch-env').then(
|
||||
(pkg) => pkg.switchEnv
|
||||
),
|
||||
logs: async () => execSync(`${PM2_PATH} logs unraid-api --lines 200`, { stdio: 'inherit' }),
|
||||
'switch-env': import('@app/cli/commands/switch-env').then((pkg) => pkg.switchEnv),
|
||||
version: import('@app/cli/commands/version').then((pkg) => pkg.version),
|
||||
status: import('@app/cli/commands/status').then((pkg) => pkg.status),
|
||||
report: import('@app/cli/commands/report').then((pkg) => pkg.report),
|
||||
'validate-token': import('@app/cli/commands/validate-token').then(
|
||||
(pkg) => pkg.validateToken
|
||||
),
|
||||
'validate-token': import('@app/cli/commands/validate-token').then((pkg) => pkg.validateToken),
|
||||
};
|
||||
|
||||
// Unknown command
|
||||
@@ -71,10 +49,5 @@ export const main = async (...argv: string[]) => {
|
||||
// Run the command
|
||||
await commandMethod(...argv);
|
||||
|
||||
// Allow the process to exit
|
||||
// Don't exit when we start though
|
||||
if (!['start', 'restart'].includes(command)) {
|
||||
// Ensure process is exited
|
||||
process.exit(0);
|
||||
}
|
||||
process.exit(0);
|
||||
};
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { getters, type RootState, store } from '@app/store';
|
||||
import { uniq } from 'lodash';
|
||||
import uniq from 'lodash/uniq';
|
||||
import {
|
||||
getServerIps,
|
||||
getUrlForField,
|
||||
@@ -68,7 +68,7 @@ const getRemoteAccessUrlsForAllowedOrigins = (
|
||||
return [];
|
||||
};
|
||||
|
||||
const getExtraOrigins = (): string[] => {
|
||||
export const getExtraOrigins = (): string[] => {
|
||||
const { extraOrigins } = getters.config().api;
|
||||
if (extraOrigins) {
|
||||
return extraOrigins
|
||||
|
||||
@@ -1,152 +0,0 @@
|
||||
import { ConnectListAllDomainsFlags } from '@vmngr/libvirt';
|
||||
import { getHypervisor } from '@app/core/utils/vms/get-hypervisor';
|
||||
import { getUnraidVersion } from '@app/common/dashboard/get-unraid-version';
|
||||
import { getArray } from '@app/common/dashboard/get-array';
|
||||
import { bootTimestamp } from '@app/common/dashboard/boot-timestamp';
|
||||
import { dashboardLogger } from '@app/core/log';
|
||||
import { getters, store } from '@app/store';
|
||||
import {
|
||||
type DashboardServiceInput,
|
||||
type DashboardInput,
|
||||
} from '@app/graphql/generated/client/graphql';
|
||||
import { API_VERSION } from '@app/environment';
|
||||
import { DynamicRemoteAccessType } from '@app/remoteAccess/types';
|
||||
import { DashboardInputSchema } from '@app/graphql/generated/client/validators';
|
||||
import { ZodError } from 'zod';
|
||||
|
||||
const getVmSummary = async (): Promise<DashboardInput['vms']> => {
|
||||
try {
|
||||
const hypervisor = await getHypervisor();
|
||||
if (!hypervisor) {
|
||||
return {
|
||||
installed: 0,
|
||||
started: 0,
|
||||
};
|
||||
}
|
||||
|
||||
const activeDomains = (await hypervisor.connectListAllDomains(
|
||||
ConnectListAllDomainsFlags.ACTIVE
|
||||
)) as unknown[];
|
||||
const inactiveDomains = (await hypervisor.connectListAllDomains(
|
||||
ConnectListAllDomainsFlags.INACTIVE
|
||||
)) as unknown[];
|
||||
return {
|
||||
installed: activeDomains.length + inactiveDomains.length,
|
||||
started: activeDomains.length,
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
installed: 0,
|
||||
started: 0,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const getDynamicRemoteAccessService = (): DashboardServiceInput | null => {
|
||||
const { config, dynamicRemoteAccess } = store.getState();
|
||||
const enabledStatus = config.remote.dynamicRemoteAccessType;
|
||||
|
||||
return {
|
||||
name: 'dynamic-remote-access',
|
||||
online: enabledStatus !== DynamicRemoteAccessType.DISABLED,
|
||||
version: dynamicRemoteAccess.runningType,
|
||||
uptime: {
|
||||
timestamp: bootTimestamp.toISOString(),
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
const services = (): DashboardInput['services'] => {
|
||||
const dynamicRemoteAccess = getDynamicRemoteAccessService();
|
||||
return [
|
||||
{
|
||||
name: 'unraid-api',
|
||||
online: true,
|
||||
uptime: {
|
||||
timestamp: bootTimestamp.toISOString(),
|
||||
},
|
||||
version: API_VERSION,
|
||||
},
|
||||
...(dynamicRemoteAccess ? [dynamicRemoteAccess] : []),
|
||||
];
|
||||
};
|
||||
|
||||
const getData = async (): Promise<DashboardInput> => {
|
||||
const emhttp = getters.emhttp();
|
||||
const docker = getters.docker();
|
||||
|
||||
return {
|
||||
vars: {
|
||||
regState: emhttp.var.regState,
|
||||
regTy: emhttp.var.regTy,
|
||||
flashGuid: emhttp.var.flashGuid,
|
||||
serverName: emhttp.var.name,
|
||||
serverDescription: emhttp.var.comment,
|
||||
},
|
||||
apps: {
|
||||
installed: docker.installed ?? 0,
|
||||
started: docker.running ?? 0,
|
||||
},
|
||||
versions: {
|
||||
unraid: await getUnraidVersion(),
|
||||
},
|
||||
os: {
|
||||
hostname: emhttp.var.name,
|
||||
uptime: bootTimestamp.toISOString(),
|
||||
},
|
||||
vms: await getVmSummary(),
|
||||
array: getArray(),
|
||||
services: services(),
|
||||
display: {
|
||||
case: {
|
||||
url: '',
|
||||
icon: '',
|
||||
error: '',
|
||||
base64: '',
|
||||
},
|
||||
},
|
||||
config: emhttp.var.configValid
|
||||
? { valid: true }
|
||||
: {
|
||||
valid: false,
|
||||
error:
|
||||
{
|
||||
error: 'UNKNOWN_ERROR',
|
||||
invalid: 'INVALID',
|
||||
nokeyserver: 'NO_KEY_SERVER',
|
||||
withdrawn: 'WITHDRAWN',
|
||||
}[emhttp.var.configState] ?? 'UNKNOWN_ERROR',
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
export const generateData = async (): Promise<DashboardInput | null> => {
|
||||
const data = await getData();
|
||||
|
||||
try {
|
||||
// Validate generated data
|
||||
// @TODO: Fix this runtype to use generated types from the Zod validators (as seen in mothership Codegen)
|
||||
const result = DashboardInputSchema().parse(data);
|
||||
|
||||
return result;
|
||||
} catch (error: unknown) {
|
||||
// Log error for user
|
||||
if (error instanceof ZodError) {
|
||||
dashboardLogger.error(
|
||||
'Failed validation with issues: ',
|
||||
error.issues.map((issue) => ({
|
||||
message: issue.message,
|
||||
path: issue.path.join(','),
|
||||
}))
|
||||
);
|
||||
} else {
|
||||
dashboardLogger.error(
|
||||
'Failed validating dashboard object: ',
|
||||
error,
|
||||
data
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
@@ -1,5 +1,6 @@
import { PORT } from '@app/environment';
import { type JSONWebKeySet } from 'jose';
import { join } from 'path';

export const getInternalApiAddress = (isHttp = true, nginxPort = 80) => {
    const envPort = PORT;
@@ -46,11 +47,6 @@ export const KEEP_ALIVE_INTERVAL_MS = THREE_MINUTES_MS; // This is set to 45 sec
/**
 * Graphql link.
 */
export const MOTHERSHIP_GRAPHQL_LINK =
    process.env.MOTHERSHIP_GRAPHQL_LINK ??
    (process.env.ENVIRONMENT === 'staging'
        ? 'https://staging.mothership.unraid.net/ws'
        : 'https://mothership.unraid.net/ws');

export const JWKS_LOCAL_PAYLOAD: JSONWebKeySet = {
    keys: [
@@ -84,3 +80,5 @@ export const KEYSERVER_VALIDATION_ENDPOINT =
/** Set the max retries for the GraphQL Client */
export const MAX_RETRIES_FOR_LINEAR_BACKOFF = 100;

export const PM2_PATH = join(import.meta.dirname, '../../', 'node_modules', '.bin', 'pm2');
@@ -1,6 +0,0 @@
|
||||
import NanoBus from 'nanobus';
|
||||
|
||||
/**
|
||||
* Graphql event bus.
|
||||
*/
|
||||
export const bus = new NanoBus();
|
||||
@@ -4,7 +4,7 @@ import { AppError } from '@app/core/errors/app-error';
|
||||
* API key error.
|
||||
*/
|
||||
export class ApiKeyError extends AppError {
|
||||
// eslint-disable-next-line @typescript-eslint/no-useless-constructor
|
||||
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
export * as modules from '@app/core/modules';
|
||||
export * as notifiers from '@app/core/notifiers';
|
||||
export * as utils from '@app/core/utils';
|
||||
export * from '@app/core/bus';
|
||||
export * from '@app/core/log';
|
||||
export * from '@app/core/permission-manager';
|
||||
export * from '@app/core/permissions';
|
||||
|
||||
@@ -1,34 +1,7 @@
|
||||
import { pino } from 'pino';
|
||||
import { LOG_TRANSPORT, LOG_TYPE } from '@app/environment';
|
||||
import { LOG_TYPE } from '@app/environment';
|
||||
|
||||
import pretty from 'pino-pretty';
|
||||
import { chmodSync, existsSync, mkdirSync, rmSync, statSync } from 'node:fs';
|
||||
import { getters } from '@app/store/index';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const makeLoggingDirectoryIfNotExists = () => {
|
||||
if (!existsSync(getters.paths()['log-base'])) {
|
||||
console.log('Creating logging directory');
|
||||
mkdirSync(getters.paths()['log-base']);
|
||||
}
|
||||
|
||||
chmodSync(getters.paths()['log-base'], 0o644);
|
||||
if (
|
||||
existsSync(`${getters.paths()['log-base']}/stdout.log`) &&
|
||||
statSync(`${getters.paths()['log-base']}/stdout.log`).size > 5_000_000
|
||||
) {
|
||||
rmSync(`${getters.paths()['log-base']}/stdout.log`);
|
||||
}
|
||||
try {
|
||||
rmSync(`${getters.paths()['log-base']}/stdout.log.*`);
|
||||
} catch (e) {
|
||||
// Ignore Error
|
||||
}
|
||||
};
|
||||
|
||||
if (LOG_TRANSPORT === 'file') {
|
||||
makeLoggingDirectoryIfNotExists();
|
||||
}
|
||||
|
||||
export const levels = [
|
||||
'trace',
|
||||
@@ -47,12 +20,8 @@ const level =
|
||||
] ?? 'info';
|
||||
|
||||
export const logDestination = pino.destination({
|
||||
dest:
|
||||
LOG_TRANSPORT === 'file'
|
||||
? join(getters.paths()['log-base'], 'stdout.log')
|
||||
: 1,
|
||||
minLength: 1_024,
|
||||
sync: false,
|
||||
sync: true,
|
||||
});
|
||||
|
||||
const stream =
|
||||
@@ -112,30 +81,3 @@ export const loggers = [
|
||||
remoteQueryLogger,
|
||||
apiLogger,
|
||||
];
|
||||
|
||||
// Send SIGUSR1 to increase log level
|
||||
process.on('SIGUSR1', () => {
|
||||
const level = logger.level;
|
||||
const nextLevel =
|
||||
levels[levels.findIndex((_level) => _level === level) + 1] ?? levels[0];
|
||||
loggers.forEach((logger) => {
|
||||
logger.level = nextLevel;
|
||||
});
|
||||
internalLogger.info({
|
||||
message: `Log level changed from ${level} to ${nextLevel}`,
|
||||
});
|
||||
});
|
||||
|
||||
// Send SIGUSR1 to decrease log level
|
||||
process.on('SIGUSR2', () => {
|
||||
const level = logger.level;
|
||||
const nextLevel =
|
||||
levels[levels.findIndex((_level) => _level === level) - 1] ??
|
||||
levels[levels.length - 1];
|
||||
loggers.forEach((logger) => {
|
||||
logger.level = nextLevel;
|
||||
});
|
||||
internalLogger.info({
|
||||
message: `Log level changed from ${level} to ${nextLevel}`,
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import type { CoreContext, CoreResult } from '@app/core/types';
|
||||
import { bus } from '@app/core/bus';
|
||||
import { AppError } from '@app/core/errors/app-error';
|
||||
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
|
||||
import { hasFields } from '@app/core/utils/validation/has-fields';
|
||||
import { FieldMissingError } from '@app/core/errors/field-missing-error';
|
||||
import { emcmd } from '@app/core/utils/clients/emcmd';
|
||||
import { getters } from '@app/store';
|
||||
import { pubsub } from '@app/core/pubsub';
|
||||
|
||||
interface Context extends CoreContext {
|
||||
readonly data: {
|
||||
@@ -61,7 +61,7 @@ export const addUser = async (context: Context): Promise<CoreResult> => {
|
||||
}
|
||||
|
||||
// Update users channel with new user
|
||||
bus.emit('users', {
|
||||
pubsub.publish('users', {
|
||||
users: {
|
||||
mutation: 'CREATED',
|
||||
node: [user],
|
||||
@@ -69,7 +69,7 @@ export const addUser = async (context: Context): Promise<CoreResult> => {
|
||||
});
|
||||
|
||||
// Update user channel with new user
|
||||
bus.emit('user', {
|
||||
pubsub.publish('user', {
|
||||
user: {
|
||||
mutation: 'CREATED',
|
||||
node: user,
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { getServerIdentifier } from '@app/core/utils/server-identifier';
|
||||
import {
|
||||
ArrayDiskType,
|
||||
type ArrayCapacity,
|
||||
@@ -56,6 +57,7 @@ export const getArrayData = (getState = store.getState): ArrayType => {
|
||||
};
|
||||
|
||||
return {
|
||||
id: getServerIdentifier('array'),
|
||||
state: emhttp.var.mdState,
|
||||
capacity,
|
||||
boot,
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
import type { CoreResult, CoreContext } from '@app/core/types';
|
||||
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
|
||||
import { getShares } from '@app/core/utils/shares/get-shares';
|
||||
|
||||
/**
|
||||
* Get all shares.
|
||||
*/
|
||||
export const getAllShares = async (context: CoreContext): Promise<CoreResult> => {
|
||||
const { user } = context;
|
||||
|
||||
// Check permissions
|
||||
ensurePermission(user, {
|
||||
resource: 'share',
|
||||
action: 'read',
|
||||
possession: 'any',
|
||||
});
|
||||
|
||||
const userShares = getShares('users');
|
||||
const diskShares = getShares('disks');
|
||||
|
||||
const shares = [
|
||||
...userShares,
|
||||
...diskShares,
|
||||
];
|
||||
|
||||
return {
|
||||
text: `Shares: ${JSON.stringify(shares, null, 2)}`,
|
||||
json: shares,
|
||||
};
|
||||
};
|
||||
@@ -4,7 +4,6 @@ import {
|
||||
blockDevices,
|
||||
diskLayout,
|
||||
} from 'systeminformation';
|
||||
import { map as asyncMap } from 'p-iteration';
|
||||
import {
|
||||
type Disk,
|
||||
DiskInterfaceType,
|
||||
@@ -91,8 +90,9 @@ export const getDisks = async (
|
||||
const partitions = await blockDevices().then((devices) =>
|
||||
devices.filter((device) => device.type === 'part')
|
||||
);
|
||||
const disks = await asyncMap(await diskLayout(), async (disk) =>
|
||||
parseDisk(disk, partitions)
|
||||
const diskLayoutData = await diskLayout();
|
||||
const disks = await Promise.all(
|
||||
diskLayoutData.map((disk) => parseDisk(disk, partitions))
|
||||
);
|
||||
|
||||
return disks;
|
||||
|
||||
@@ -30,7 +30,7 @@ export const getPermissions = async function (context: CoreContext): Promise<Cor
|
||||
const grants = Object.entries(ac.getGrants())
|
||||
.map(([name, grant]) => {
|
||||
// @ts-expect-error - $extend and grants are any
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
|
||||
const { $extend: _, ...grants } = grant;
|
||||
return [name, grants];
|
||||
})
|
||||
|
||||
@@ -4,27 +4,22 @@ import type { CoreResult, CoreContext } from '@app/core/types';
|
||||
import { getUnraidApiService } from '@app/core/modules/services/get-unraid-api';
|
||||
import { NODE_ENV } from '@app/environment';
|
||||
|
||||
const devNames = [
|
||||
'emhttpd',
|
||||
'rest-api',
|
||||
];
|
||||
const devNames = ['emhttpd', 'rest-api'];
|
||||
|
||||
const coreNames = [
|
||||
'unraid-api',
|
||||
];
|
||||
const coreNames = ['unraid-api'];
|
||||
|
||||
interface Service {
|
||||
online: boolean;
|
||||
uptime: string;
|
||||
version: string;
|
||||
online: boolean;
|
||||
uptime: string;
|
||||
version: string;
|
||||
}
|
||||
|
||||
interface ServiceResult extends CoreResult {
|
||||
json: Service;
|
||||
json: Service;
|
||||
}
|
||||
|
||||
interface ServiceWithName extends Service {
|
||||
name: string;
|
||||
name: string;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -33,39 +28,40 @@ interface ServiceWithName extends Service {
|
||||
* @param services
|
||||
* @param names
|
||||
*/
|
||||
const addNameToService = (services: ServiceResult[], names: string[]): ServiceWithName[] => services.map((service, index) => ({
|
||||
name: names[index],
|
||||
...service.json,
|
||||
}));
|
||||
const addNameToService = (services: ServiceResult[], names: string[]): ServiceWithName[] =>
|
||||
services.map((service, index) => ({
|
||||
name: names[index],
|
||||
...service.json,
|
||||
}));
|
||||
|
||||
interface Result extends CoreResult {
|
||||
json: ServiceWithName[];
|
||||
json: ServiceWithName[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all services.
|
||||
*/
|
||||
export const getServices = async (context: CoreContext): Promise<Result> => {
|
||||
const logErrorAndReturnEmptyArray = (error: Error) => {
|
||||
logger.error(error);
|
||||
return [];
|
||||
};
|
||||
const logErrorAndReturnEmptyArray = (error: Error) => {
|
||||
logger.error(error);
|
||||
return [];
|
||||
};
|
||||
|
||||
const devServices: ServiceResult[] = NODE_ENV === 'development' ? await Promise.all([
|
||||
getEmhttpdService(context),
|
||||
]).catch(logErrorAndReturnEmptyArray) as ServiceResult[] : [];
|
||||
const devServices: ServiceResult[] = (await Promise.all([getEmhttpdService(context)]).catch(
|
||||
logErrorAndReturnEmptyArray
|
||||
)) as ServiceResult[];
|
||||
|
||||
const coreServices: ServiceResult[] = await Promise.all([
|
||||
getUnraidApiService(context),
|
||||
]).catch(logErrorAndReturnEmptyArray) as ServiceResult[];
|
||||
const coreServices: ServiceResult[] = (await Promise.all([getUnraidApiService(context)]).catch(
|
||||
logErrorAndReturnEmptyArray
|
||||
)) as ServiceResult[];
|
||||
|
||||
const result = [
|
||||
...addNameToService(devServices, devNames),
|
||||
...addNameToService(coreServices, coreNames),
|
||||
];
|
||||
const result = [
|
||||
...addNameToService(devServices, devNames),
|
||||
...addNameToService(coreServices, coreNames),
|
||||
];
|
||||
|
||||
return {
|
||||
text: `Services: ${JSON.stringify(result, null, 2)}`,
|
||||
json: result,
|
||||
};
|
||||
return {
|
||||
text: `Services: ${JSON.stringify(result, null, 2)}`,
|
||||
json: result,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -11,7 +11,6 @@ export * from './users';
|
||||
export * from './vms';
|
||||
export * from './add-share';
|
||||
export * from './add-user';
|
||||
export * from './get-all-shares';
|
||||
export * from './get-apps';
|
||||
export * from './get-devices';
|
||||
export * from './get-disks';
|
||||
|
||||
@@ -78,7 +78,7 @@ export const updateDisk = async (context: Context): Promise<Result> => {
|
||||
7: '7 hours',
|
||||
8: '8 hours',
|
||||
9: '9 hours',
|
||||
/* eslint-enable @typescript-eslint/naming-convention */
|
||||
|
||||
});
|
||||
|
||||
// Defines the type of partition layout to create when formatting hard drives 2TB in size and smaller **only**. (All devices larger then 2TB are always set up with GPT partition tables.)
|
||||
@@ -89,7 +89,7 @@ export const updateDisk = async (context: Context): Promise<Result> => {
|
||||
|
||||
1: 'MBR: unaligned',
|
||||
2: 'MBR: 4K-aligned',
|
||||
/* eslint-enable @typescript-eslint/naming-convention */
|
||||
|
||||
});
|
||||
|
||||
// Selects the method to employ when writing to enabled disk in parity protected array.
|
||||
@@ -98,7 +98,7 @@ export const updateDisk = async (context: Context): Promise<Result> => {
|
||||
|
||||
0: 'read/modify/write',
|
||||
1: 'reconstruct write',
|
||||
/* eslint-enable @typescript-eslint/naming-convention */
|
||||
|
||||
});
|
||||
|
||||
// Defines the default file system type to create when an * unmountable * array device is formatted.
|
||||
@@ -111,7 +111,7 @@ export const updateDisk = async (context: Context): Promise<Result> => {
|
||||
'luks:xfs': 'xfs - encrypted',
|
||||
'luks:btrfs': 'btrfs - encrypted',
|
||||
'luks:reiserfs': 'reiserfs - encrypted',
|
||||
/* eslint-enable @typescript-eslint/naming-convention */
|
||||
|
||||
});
|
||||
|
||||
const {
|
||||
|
||||
@@ -8,8 +8,7 @@ export type Options = NotifierOptions
|
||||
*/
|
||||
export class HttpNotifier extends Notifier {
|
||||
readonly $http = got;
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-useless-constructor
|
||||
|
||||
constructor(options: Options) {
|
||||
super(options);
|
||||
}
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
import Mustache from 'mustache';
|
||||
import { type LooseObject } from '@app/core/types';
|
||||
import { type NotificationIni } from '../types/states/notification';
|
||||
|
||||
export type NotifierLevel = 'info' | 'warn' | 'error';
|
||||
|
||||
export type NotifierOptions = Partial<{
|
||||
level: NotifierLevel;
|
||||
importance?: NotificationIni['importance'];
|
||||
helpers?: Record<string, unknown>;
|
||||
template?: string;
|
||||
}>;
|
||||
|
||||
39  api/src/core/notifiers/unraid-local.ts  Normal file
@@ -0,0 +1,39 @@
import { logger } from '@app/core/log';
import { Notifier, type NotifierSendOptions, type NotifierOptions } from '@app/core/notifiers/notifier';
import { execa } from 'execa';

type ValidLocalLevels = 'alert' | 'warning' | 'normal';

export class UnraidLocalNotifier extends Notifier {
    private convertNotifierLevel(level: NotifierOptions['level']): ValidLocalLevels {
        switch (level) {
            case 'error':
                return 'alert';
            case 'warn':
                return 'warning';
            case 'info':
                return 'normal';
            default:
                return 'normal';
        }
    }

    constructor(options: NotifierOptions = {}) {
        super(options);

        this.level = options.importance ?? this.convertNotifierLevel(options.level ?? 'info');
        this.template = options.template ?? '{{ message }}';
    }

    async send(options: NotifierSendOptions) {
        const { title, data } = options;
        const { level } = this;

        const template = this.render(data);
        try {
            await execa('/usr/local/emhttp/webGui/scripts/notify', ['-i', `${level}`, '-s', 'Unraid API', '-d', `${template}`, '-e', `${title}`]);
        } catch (error: unknown) {
            logger.warn(`Error sending unraid notification: ${error instanceof Error ? error.message : 'No Error Information'}`);
        }
    }
}
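For illustration only (not part of the diff): based on the constructor and send() signature above, usage would presumably look like the sketch below. The shape of `data` depends on the base Notifier's Mustache rendering, so `{ message: ... }` is an assumption tied to the default '{{ message }}' template.

import { UnraidLocalNotifier } from '@app/core/notifiers/unraid-local';

// Sends a warning-level notification through the stock webGui notify script.
const notifier = new UnraidLocalNotifier({ level: 'warn' });
await notifier.send({
    title: 'Parity check finished',
    data: { message: 'Parity check finished (0 errors)' }, // assumed shape for the '{{ message }}' template
});
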
@@ -20,6 +20,7 @@ const roles: Record<string, Role> = {
            { resource: 'apikey', action: 'read:any', attributes: '*' },
            { resource: 'cloud', action: 'read:own', attributes: '*' },
            { resource: 'config', action: 'update:own', attributes: '*' },
            { resource: 'config', action: 'read:any', attributes: '*' },
            { resource: 'connect', action: 'read:own', attributes: '*' },
            { resource: 'connect', action: 'update:own', attributes: '*' },
            { resource: 'customizations', action: 'read:any', attributes: '*' },
@@ -117,22 +118,46 @@ const roles: Record<string, Role> = {
            { resource: 'config', action: 'update:own', attributes: '*' },
            { resource: 'connect', action: 'read:own', attributes: '*' },
            { resource: 'connect', action: 'update:own', attributes: '*' },
            { resource: 'notifications', action: 'read:any', attributes: '*' },
            { resource: 'notifications', action: 'update:any', attributes: '*' },
        ],
    },
    my_servers: {
        extends: 'guest',
        permissions: [
            { resource: 'array', action: 'read:any', attributes: '*' },
            { resource: 'config', action: 'read:any', attributes: '*' },
            { resource: 'connect', action: 'read:any', attributes: '*' },
            {
                resource: 'connect/dynamic-remote-access',
                action: 'read:any',
                attributes: '*',
            },
            {
                resource: 'connect/dynamic-remote-access',
                action: 'update:own',
                attributes: '*',
            },
            { resource: 'customizations', action: 'read:any', attributes: '*' },
            { resource: 'dashboard', action: 'read:any', attributes: '*' },
            { resource: 'display', action: 'read:any', attributes: '*' },
            {
                resource: 'docker/container',
                action: 'read:any',
                attributes: '*',
            },
            { resource: 'docker', action: 'read:any', attributes: '*' },
            {
                resource: 'docker/container',
                action: 'read:any',
                attributes: '*',
            },
            { resource: 'info', action: 'read:any', attributes: '*' },
            { resource: 'logs', action: 'read:any', attributes: '*' },
            { resource: 'docker/network', action: 'read:any', attributes: '*' },
            { resource: 'network', action: 'read:any', attributes: '*' },
            { resource: 'notifications', action: 'read:any', attributes: '*' },
            { resource: 'services', action: 'read:any', attributes: '*' },
            { resource: 'vars', action: 'read:any', attributes: '*' },
            { resource: 'vms', action: 'read:any', attributes: '*' },
            { resource: 'vms/domain', action: 'read:any', attributes: '*' },
            { resource: 'unraid-version', action: 'read:any', attributes: '*' },
@@ -11,6 +11,8 @@ export enum PUBSUB_CHANNEL {
    DISPLAY = 'DISPLAY',
    INFO = 'INFO',
    NOTIFICATION = 'NOTIFICATION',
    NOTIFICATION_ADDED = 'NOTIFICATION_ADDED',
    NOTIFICATION_OVERVIEW = 'NOTIFICATION_OVERVIEW',
    OWNER = 'OWNER',
    SERVERS = 'SERVERS',
    VMS = 'VMS',
23
api/src/core/types/states/nginx.ts
Normal file
@@ -0,0 +1,23 @@
export interface FqdnEntry {
    interface: string;
    id: number | null;
    fqdn: string;
    isIpv6: boolean;
}

export interface Nginx {
    certificateName: string;
    certificatePath: string;
    defaultUrl: string;
    httpPort: number;
    httpsPort: number;
    lanIp: string;
    lanIp6: string;
    lanMdns: string;
    lanName: string;
    sslEnabled: boolean;
    sslMode: 'yes' | 'no' | 'auto';
    wanAccessEnabled: boolean;
    wanIp: string;
    fqdnUrls: FqdnEntry[];
}
8
api/src/core/types/states/notification.ts
Normal file
@@ -0,0 +1,8 @@
export interface NotificationIni {
    timestamp: string;
    event: string;
    subject: string;
    description: string;
    importance: 'normal' | 'alert' | 'warning';
    link?: string;
}
@@ -1,23 +0,0 @@
import { getters } from '@app/store';
import htpasswd from 'htpasswd-js';

interface Options {
    username: string;
    password: string;
    file?: string;
}

/**
 * Check if the username and password match a htpasswd file.
 */
export const checkAuth = async (options: Options): Promise<unknown> => {
    const { username, password, file } = options;

    // `valid` will be true if and only if
    // username and password were correct.
    return htpasswd.authenticate({
        username,
        password,
        file: file ?? getters.paths().htpasswd,
    });
};
@@ -1,28 +0,0 @@
import { PermissionError } from '@app/core/errors/permission-error';
import { checkAuth } from '@app/core/utils/authentication/check-auth';
import { getters } from '@app/store';

interface Options {
    username: string;
    password: string;
    file: string;
}

/**
 * Check if the username and password match a htpasswd file
 */
export const ensureAuth = async (options: Options) => {
    const { username, password, file } = options;

    // `valid` will be true if and only if
    // username and password were correct.
    const valid = await checkAuth({
        username,
        password,
        file: file || getters.paths().htpasswd,
    });

    if (!valid) {
        throw new PermissionError('Invalid auth!');
    }
};
@@ -1,5 +1,5 @@
|
||||
import { getAllowedOrigins } from '@app/common/allowed-origins';
|
||||
import { DynamicRemoteAccessType } from '@app/remoteAccess/types';
|
||||
import { DynamicRemoteAccessType } from '@app/graphql/generated/api/types';
|
||||
import {
|
||||
type SliceState as ConfigSliceState,
|
||||
initialState,
|
||||
@@ -9,7 +9,7 @@ import type {
|
||||
MyServersConfig,
|
||||
MyServersConfigMemory,
|
||||
} from '@app/types/my-servers-config';
|
||||
import { isEqual } from 'lodash';
|
||||
import { isEqual } from 'lodash-es';
|
||||
|
||||
export type ConfigType = 'flash' | 'memory';
|
||||
type ConfigObject<T> = T extends 'flash'
|
||||
@@ -23,7 +23,7 @@ type ConfigObject<T> = T extends 'flash'
|
||||
* @param mode 'flash' or 'memory', changes what fields are included in the writeable payload
|
||||
* @returns
|
||||
*/
|
||||
// eslint-disable-next-line complexity
|
||||
|
||||
export const getWriteableConfig = <T extends ConfigType>(
|
||||
config: ConfigSliceState,
|
||||
mode: T
|
||||
@@ -32,7 +32,7 @@ export const getWriteableConfig = <T extends ConfigType>(
|
||||
const { api, local, notifier, remote, upc, connectionStatus } = config;
|
||||
|
||||
// Create new state
|
||||
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions
|
||||
|
||||
const newState: ConfigObject<T> = {
|
||||
api: {
|
||||
version: api?.version ?? initialState.api.version,
|
||||
|
||||
21
api/src/core/utils/files/safe-ini-serializer.ts
Normal file
@@ -0,0 +1,21 @@
import { Serializer } from 'multi-ini';

const serializer = new Serializer({ keep_quotes: false });

const replacer = (_, value: unknown) => {
    if (typeof value === 'boolean') {
        return value ? 'true' : 'false';
    }

    return value;
};

/**
 *
 * @param object Any object to serialize
 * @returns String converted to ini with multi-ini, with any booleans string escaped to prevent a crash
 */
export const safelySerializeObjectToIni = (object: object): string => {
    const safeObject = JSON.parse(JSON.stringify(object, replacer));
    return serializer.serialize(safeObject);
};
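A quick illustration of the boolean handling this helper performs (the config object below is made up for the example):

import { safelySerializeObjectToIni } from '@app/core/utils/files/safe-ini-serializer';

// JSON.stringify runs the replacer first, so booleans reach multi-ini's
// Serializer as the strings 'true'/'false' instead of crashing it.
const ini = safelySerializeObjectToIni({ remote: { wanaccess: true, apikey: '' } });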
@@ -1,7 +1,6 @@
// Created from 'create-ts-index'

export * from './array';
export * from './authentication';
export * from './clients';
export * from './plugins';
export * from './shares';
@@ -1,7 +1,5 @@
|
||||
import { parse as parseIni } from 'ini';
|
||||
import camelCaseKeys from 'camelcase-keys';
|
||||
import { includeKeys } from 'filter-obj';
|
||||
import mapObject from 'map-obj';
|
||||
import { AppError } from '@app/core/errors/app-error';
|
||||
import { accessSync, readFileSync } from 'fs';
|
||||
import { access } from 'fs/promises';
|
||||
@@ -11,15 +9,15 @@ import { extname } from 'path';
|
||||
type ConfigType = 'ini' | 'cfg';
|
||||
|
||||
type OptionsWithPath = {
|
||||
/** Relative or absolute file path. */
|
||||
filePath: string;
|
||||
/** If the file is an "ini" or a "cfg". */
|
||||
type?: ConfigType;
|
||||
/** Relative or absolute file path. */
|
||||
filePath: string;
|
||||
/** If the file is an "ini" or a "cfg". */
|
||||
type?: ConfigType;
|
||||
};
|
||||
|
||||
type OptionsWithLoadedFile = {
|
||||
file: string;
|
||||
type: ConfigType;
|
||||
file: string;
|
||||
type: ConfigType;
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -38,53 +36,66 @@ type OptionsWithLoadedFile = {
|
||||
* ```
|
||||
*/
|
||||
const fixObjectArrays = (object: Record<string, any>) => {
|
||||
// An object of arrays for keys that end in `:${number}`
|
||||
const temporaryArrays = {};
|
||||
// An object of arrays for keys that end in `:${number}`
|
||||
const temporaryArrays = {};
|
||||
|
||||
// An object without any array items
|
||||
const filteredObject = includeKeys(object, (key, value) => {
|
||||
// eslint-disable-next-line @typescript-eslint/prefer-regexp-exec
|
||||
const [, name, index] = [...((key).match(/(.*):(\d+$)/) ?? [])];
|
||||
if (!name || !index) {
|
||||
return true;
|
||||
}
|
||||
// An object without any array items
|
||||
const filteredObject = Object.fromEntries(
|
||||
Object.entries(object).filter(([key, value]) => {
|
||||
const match = key.match(/(.*):(\d+$)/);
|
||||
if (!match) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Create initial array
|
||||
if (!Array.isArray(temporaryArrays[name])) {
|
||||
temporaryArrays[name] = [];
|
||||
}
|
||||
const [, name, index] = match;
|
||||
if (!name || !index) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Add value
|
||||
temporaryArrays[name].push(value);
|
||||
// Create initial array
|
||||
if (!Array.isArray(temporaryArrays[name])) {
|
||||
temporaryArrays[name] = [];
|
||||
}
|
||||
|
||||
// Remove the old field
|
||||
return false;
|
||||
});
|
||||
// Add value
|
||||
temporaryArrays[name].push(value);
|
||||
|
||||
return {
|
||||
...filteredObject,
|
||||
...temporaryArrays,
|
||||
};
|
||||
// Remove the old field
|
||||
return false;
|
||||
})
|
||||
);
|
||||
|
||||
return {
|
||||
...filteredObject,
|
||||
...temporaryArrays,
|
||||
};
|
||||
};
|
||||
|
||||
export const fileExists = async (path: string) => access(path, F_OK).then(() => true).catch(() => false);
|
||||
export const fileExists = async (path: string) =>
|
||||
access(path, F_OK)
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
export const fileExistsSync = (path: string) => {
|
||||
try {
|
||||
accessSync(path, F_OK);
|
||||
return true;
|
||||
} catch (error: unknown) {
|
||||
return false;
|
||||
}
|
||||
try {
|
||||
accessSync(path, F_OK);
|
||||
return true;
|
||||
} catch (error: unknown) {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
export const getExtensionFromPath = (filePath: string): string => extname(filePath);
|
||||
|
||||
const isFilePathOptions = (options: OptionsWithLoadedFile | OptionsWithPath): options is OptionsWithPath => Object.keys(options).includes('filePath');
|
||||
const isFileOptions = (options: OptionsWithLoadedFile | OptionsWithPath): options is OptionsWithLoadedFile => Object.keys(options).includes('file');
|
||||
const isFilePathOptions = (
|
||||
options: OptionsWithLoadedFile | OptionsWithPath
|
||||
): options is OptionsWithPath => Object.keys(options).includes('filePath');
|
||||
const isFileOptions = (
|
||||
options: OptionsWithLoadedFile | OptionsWithPath
|
||||
): options is OptionsWithLoadedFile => Object.keys(options).includes('file');
|
||||
|
||||
export const loadFileFromPathSync = (filePath: string): string => {
|
||||
if (!fileExistsSync(filePath)) throw new Error(`Failed to load file at path: ${filePath}`);
|
||||
return readFileSync(filePath, 'utf-8').toString();
|
||||
if (!fileExistsSync(filePath)) throw new Error(`Failed to load file at path: ${filePath}`);
|
||||
return readFileSync(filePath, 'utf-8').toString();
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -94,48 +105,51 @@ export const loadFileFromPathSync = (filePath: string): string => {
|
||||
*/
|
||||
const isValidConfigExtension = (extension: string): boolean => ['ini', 'cfg'].includes(extension);
|
||||
|
||||
export const parseConfig = <T extends Record<string, any>>(options: OptionsWithLoadedFile | OptionsWithPath): T => {
|
||||
let fileContents: string;
|
||||
let extension: string;
|
||||
export const parseConfig = <T extends Record<string, any>>(
|
||||
options: OptionsWithLoadedFile | OptionsWithPath
|
||||
): T => {
|
||||
let fileContents: string;
|
||||
let extension: string;
|
||||
|
||||
if (isFilePathOptions(options)) {
|
||||
const { filePath, type } = options;
|
||||
if (isFilePathOptions(options)) {
|
||||
const { filePath, type } = options;
|
||||
|
||||
const validFile = fileExistsSync(filePath);
|
||||
extension = type ?? getExtensionFromPath(filePath);
|
||||
const validExtension = isValidConfigExtension(extension);
|
||||
const validFile = fileExistsSync(filePath);
|
||||
extension = type ?? getExtensionFromPath(filePath);
|
||||
const validExtension = isValidConfigExtension(extension);
|
||||
|
||||
if (validFile && validExtension) {
|
||||
fileContents = loadFileFromPathSync(options.filePath);
|
||||
} else {
|
||||
throw new AppError(`Invalid File Path: ${options.filePath}, or Extension: ${extension}`);
|
||||
}
|
||||
} else if (isFileOptions(options)) {
|
||||
const { file, type } = options;
|
||||
fileContents = file;
|
||||
const extension = type;
|
||||
if (!isValidConfigExtension(extension)) {
|
||||
throw new AppError(`Invalid Extension for Ini File: ${extension}`);
|
||||
}
|
||||
} else {
|
||||
throw new AppError('Invalid Parameters Passed to ParseConfig');
|
||||
}
|
||||
if (validFile && validExtension) {
|
||||
fileContents = loadFileFromPathSync(options.filePath);
|
||||
} else {
|
||||
throw new AppError(`Invalid File Path: ${options.filePath}, or Extension: ${extension}`);
|
||||
}
|
||||
} else if (isFileOptions(options)) {
|
||||
const { file, type } = options;
|
||||
fileContents = file;
|
||||
const extension = type;
|
||||
if (!isValidConfigExtension(extension)) {
|
||||
throw new AppError(`Invalid Extension for Ini File: ${extension}`);
|
||||
}
|
||||
} else {
|
||||
throw new AppError('Invalid Parameters Passed to ParseConfig');
|
||||
}
|
||||
|
||||
const data: Record<string, any> = parseIni(fileContents);
|
||||
// Remove quotes around keys
|
||||
const dataWithoutQuoteKeys = mapObject(data, (key, value) =>
|
||||
// @SEE: https://stackoverflow.com/a/19156197/2311366
|
||||
[(key).replace(/^"(.+(?="$))"$/, '$1'), value],
|
||||
);
|
||||
const data: Record<string, any> = parseIni(fileContents);
|
||||
// Remove quotes around keys
|
||||
const dataWithoutQuoteKeys = Object.fromEntries(
|
||||
Object.entries(data).map(([key, value]) => [key.replace(/^"(.+(?="$))"$/, '$1'), value])
|
||||
);
|
||||
|
||||
// Result object with array items as actual arrays
|
||||
const result = Object.fromEntries(
|
||||
Object.entries(dataWithoutQuoteKeys)
|
||||
.map(([key, value]) => [key, typeof value === 'object' ? fixObjectArrays(value) : value]),
|
||||
);
|
||||
// Result object with array items as actual arrays
|
||||
const result = Object.fromEntries(
|
||||
Object.entries(dataWithoutQuoteKeys).map(([key, value]) => [
|
||||
key,
|
||||
typeof value === 'object' ? fixObjectArrays(value) : value,
|
||||
])
|
||||
);
|
||||
|
||||
// Convert all keys to camel case
|
||||
return camelCaseKeys(result, {
|
||||
deep: true,
|
||||
}) as T;
|
||||
// Convert all keys to camel case
|
||||
return camelCaseKeys(result, {
|
||||
deep: true,
|
||||
}) as T;
|
||||
};
|
||||
|
||||
@@ -8,6 +8,6 @@ export const getPermissions = (role: string): Record<string, Record<string, stri
    const grants: Record<string, Record<string, string[]>> = ac.getGrants();
    const { $extend, ...roles } = grants[role] ?? {};
    const inheritedRoles = Array.isArray($extend) ? $extend.map(role => getPermissions(role))[0] : {};
    // eslint-disable-next-line prefer-object-spread

    return Object.assign({}, roles, inheritedRoles);
};
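A sketch of what the merged grant lookup is expected to return for a role that extends another; role names follow the roles table earlier in this diff, while the import path and exact grant shape (which comes from accesscontrol's getGrants()) are assumptions:

import { getPermissions } from '@app/core/utils/permissions/get-permissions'; // path assumed

// 'my_servers' extends 'guest', so its own resource grants are merged over the inherited ones.
const grants = getPermissions('my_servers');
console.log(Object.keys(grants)); // e.g. ['array', 'config', 'connect', ...]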
@@ -40,16 +40,23 @@ export const phpLoader = async (options: Options) => {
|
||||
encodeParameters(body),
|
||||
];
|
||||
|
||||
return execa('php', options_, { cwd: __dirname })
|
||||
.then(({ stdout }) => {
|
||||
// Missing php file
|
||||
if (stdout.includes(`Warning: include(${file}): failed to open stream: No such file or directory in ${path.join(__dirname, '/wrapper.php')}`)) {
|
||||
throw new FileMissingError(file);
|
||||
}
|
||||
return execa('php', options_, { cwd: import.meta.dirname })
|
||||
.then(({ stdout }) => {
|
||||
// Missing php file
|
||||
if (
|
||||
stdout.includes(
|
||||
`Warning: include(${file}): failed to open stream: No such file or directory in ${path.join(
|
||||
import.meta.dirname,
|
||||
'/wrapper.php'
|
||||
)}`
|
||||
)
|
||||
) {
|
||||
throw new FileMissingError(file);
|
||||
}
|
||||
|
||||
return stdout;
|
||||
})
|
||||
.catch(error => {
|
||||
throw new PhpError(error);
|
||||
});
|
||||
return stdout;
|
||||
})
|
||||
.catch((error) => {
|
||||
throw new PhpError(error);
|
||||
});
|
||||
};
|
||||
|
||||
18
api/src/core/utils/plugins/wrapper.php
Normal file
@@ -0,0 +1,18 @@
<?php
// Borrowed with love from https://b3z13r.wordpress.com/2011/05/16/passing-values-from-the-commandline-to-php-by-getpost-method/
// e.g. `./wrapper.php GET /tmp/random_file.php?arg1=true&arg2=a-really-long-string` { "username": "root" }
$method = $argv[1];
$query_parts = explode('?', $argv[2], 2);
$file = $query_parts[0];
$query_params = $query_parts[1];
$body = $argv[3];

// Load query_params or body into correct var
if ($method === 'GET') {
    parse_str($query_params, $_GET);
} else {
    parse_str($body, $_POST);
}

include($file);
?>
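Read together with the phpLoader change above, the Node side is expected to shell out to this wrapper roughly as follows; the wrapper path, the included webGui script, and the empty body string are illustrative assumptions, since the diff only shows the tail of the argument list:

import { execa } from 'execa';

// Mirrors the execa call in phpLoader: HTTP-style method, "file?querystring",
// then a url-encoded body string (used for POST, empty for GET).
const { stdout } = await execa(
    'php',
    ['wrapper.php', 'GET', '/usr/local/emhttp/webGui/include/Example.php?display=raw', ''],
    { cwd: import.meta.dirname }
);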
26
api/src/core/utils/pm2/unraid-api-running.ts
Normal file
@@ -0,0 +1,26 @@
import pm2 from 'pm2';

export const isUnraidApiRunning = async (): Promise<boolean | undefined> => {
    return new Promise((resolve, reject) => {
        pm2.connect(function (err) {
            if (err) {
                console.error(err);
                reject('Could not connect to pm2');
            }

            pm2.describe('unraid-api', function (err, processDescription) {
                console.log(err);
                if (err || processDescription.length === 0) {
                    console.log(false); // Service not found or error occurred
                    resolve(false);
                } else {
                    const isOnline = processDescription?.[0]?.pm2_env?.status === 'online';
                    console.log(isOnline); // Output true if online, false otherwise
                    resolve(isOnline);
                }

                pm2.disconnect();
            });
        });
    });
};
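A sketch of how the new helper might be consumed from CLI code that only needs a yes/no answer, assuming the caller treats a failed pm2 connection as "not running":

import { isUnraidApiRunning } from '@app/core/utils/pm2/unraid-api-running';

// Resolves true only when pm2 reports the `unraid-api` process as online;
// connect errors reject, so callers should catch.
const running = await isUnraidApiRunning().catch(() => false);
if (!running) {
    console.log('unraid-api is not running under pm2');
}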
14
api/src/core/utils/server-identifier.ts
Normal file
@@ -0,0 +1,14 @@
import { getters } from '@app/store/index';
import crypto from 'crypto';
import { hostname } from 'os';
export const getServerIdentifier = (): string => {
    const flashGuid = getters.emhttp()?.var?.flashGuid ?? 'FLASH_GUID_NOT_FOUND';
    return crypto
        .createHash('sha256')
        .update(`${flashGuid}-${hostname()}`)
        .digest('hex');
};

export const serverIdentifierMatches = (serverIdentifier: string): boolean => {
    return serverIdentifier === getServerIdentifier();
}
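The identifier is a stable hash of the flash GUID plus the hostname, so the match helper can be reproduced externally if needed; a minimal sketch (the GUID value below is made up):

import crypto from 'crypto';
import { hostname } from 'os';

// Same derivation as getServerIdentifier(): sha256 over "<flashGuid>-<hostname>".
const expected = crypto
    .createHash('sha256')
    .update(`0951-1666-EXAMPLE-${hostname()}`)
    .digest('hex');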
@@ -1,4 +1,3 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
import { getters } from '@app/store';
import type { DiskShare, Share, UserShare } from '@app/core/types/states/share';
import { type ArrayDisk } from '@app/graphql/generated/api/types';
@@ -1,5 +1,4 @@
|
||||
import { execa } from 'execa';
|
||||
import { map as asyncMap } from 'p-iteration';
|
||||
import { sync as commandExistsSync } from 'command-exists';
|
||||
|
||||
interface Device {
|
||||
@@ -13,9 +12,9 @@ interface Device {
|
||||
* @param devices Devices to be checked.
|
||||
* @returns Processed devices.
|
||||
*/
|
||||
export const filterDevices = async (devices: Device[]): Promise<Device[]> => asyncMap(devices, async (device: Device) => {
|
||||
export const filterDevices = async (devices: Device[]): Promise<Device[]> => {
|
||||
// Don't run if we don't have the udevadm command available
|
||||
if (!commandExistsSync('udevadm')) return device;
|
||||
if (!commandExistsSync('udevadm')) return devices;
|
||||
|
||||
const networkDeviceIds = await execa('udevadm', 'info -q path -p /sys/class/net/eth0'.split(' '))
|
||||
.then(({ stdout }) => {
|
||||
@@ -25,7 +24,11 @@ export const filterDevices = async (devices: Device[]): Promise<Device[]> => asy
|
||||
.catch(() => []);
|
||||
|
||||
const allowed = new Set(networkDeviceIds);
|
||||
device.allowed = allowed.has(device.id);
|
||||
|
||||
return device;
|
||||
});
|
||||
const processedDevices = devices.map((device: Device) => {
|
||||
device.allowed = allowed.has(device.id);
|
||||
return device;
|
||||
});
|
||||
|
||||
return processedDevices;
|
||||
};
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import pProps from 'p-props';
|
||||
import { type Domain } from '@app/core/types';
|
||||
import { getHypervisor } from '@app/core/utils/vms/get-hypervisor';
|
||||
|
||||
@@ -27,24 +26,34 @@ export const parseDomain = async (type: DomainLookupType, id: string): Promise<D
|
||||
const domain = await client[method](id);
|
||||
const info = await domain.getInfoAsync();
|
||||
|
||||
const results = await pProps({
|
||||
uuid: domain.getUUIDAsync(),
|
||||
osType: domain.getOSTypeAsync(),
|
||||
autostart: domain.getAutostartAsync(),
|
||||
maxMemory: domain.getMaxMemoryAsync(),
|
||||
schedulerType: domain.getSchedulerTypeAsync(),
|
||||
schedulerParameters: domain.getSchedulerParametersAsync(),
|
||||
securityLabel: domain.getSecurityLabelAsync(),
|
||||
name: domain.getNameAsync(),
|
||||
const [uuid, osType, autostart, maxMemory, schedulerType, schedulerParameters, securityLabel, name] = await Promise.all([
|
||||
domain.getUUIDAsync(),
|
||||
domain.getOSTypeAsync(),
|
||||
domain.getAutostartAsync(),
|
||||
domain.getMaxMemoryAsync(),
|
||||
domain.getSchedulerTypeAsync(),
|
||||
domain.getSchedulerParametersAsync(),
|
||||
domain.getSecurityLabelAsync(),
|
||||
domain.getNameAsync(),
|
||||
]);
|
||||
|
||||
const results = {
|
||||
uuid,
|
||||
osType,
|
||||
autostart,
|
||||
maxMemory,
|
||||
schedulerType,
|
||||
schedulerParameters,
|
||||
securityLabel,
|
||||
name,
|
||||
...info,
|
||||
state: info.state.replace(' ', '_'),
|
||||
});
|
||||
};
|
||||
|
||||
if (info.state === 'running') {
|
||||
results.vcpus = await domain.getVcpusAsync();
|
||||
results.memoryStats = await domain.getMemoryStatsAsync();
|
||||
}
|
||||
|
||||
// @ts-expect-error fix pProps inferred type
|
||||
return results;
|
||||
};
|
||||
|
||||
15
api/src/core/utils/write-to-boot.ts
Normal file
@@ -0,0 +1,15 @@
import fs from 'fs';
import path from 'path';
import { logger } from '@app/core/log';
import convert from 'convert';

const writeFile = async (filePath: string, fileContents: string | Buffer) => {
    logger.debug(`Writing ${convert(fileContents.length, 'bytes').to('kilobytes')} to ${filePath}`);
    await fs.promises.writeFile(filePath, fileContents);
};

export const writeToBoot = async (filePath: string, fileContents: string | Buffer) => {
    const basePath = '/boot/config/plugins/dynamix/';
    const resolvedPath = path.resolve(basePath, filePath);
    await writeFile(resolvedPath, fileContents);
};
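Expected usage is a caller passing a path relative to the dynamix plugin directory; the file name and contents below are illustrative:

import { writeToBoot } from '@app/core/utils/write-to-boot';

// Resolves against /boot/config/plugins/dynamix/ and logs the payload size in kilobytes.
await writeToBoot('notifications/custom.cfg', 'enabled=true\n');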
11
api/src/dotenv.ts
Normal file
@@ -0,0 +1,11 @@
import { config } from 'dotenv';

const env =
    process.env.NODE_ENV === 'development' || process.env.NODE_ENV === 'test'
        ? config({ debug: true, path: `./.env.${process.env.NODE_ENV}`, encoding: 'utf-8' })
        : config({
              path: '/usr/local/unraid-api/.env',
              encoding: 'utf-8',
          });

export default env;
@@ -1,20 +1,33 @@
|
||||
export const API_VERSION = process.env.VERSION ?? 'THIS_WILL_BE_REPLACED_WHEN_BUILT';
|
||||
export const NODE_ENV = process.env.NODE_ENV as 'development' | 'test' | 'staging' | 'production';
|
||||
import { version } from 'package.json';
|
||||
|
||||
export const API_VERSION =
|
||||
process.env.npm_package_version ?? version ?? new Error('API_VERSION not set');
|
||||
|
||||
export const NODE_ENV = process.env.NODE_ENV as 'development' | 'test' | 'staging' | 'production' ?? 'production';
|
||||
export const environment = {
|
||||
IS_MAIN_PROCESS: false,
|
||||
IS_MAIN_PROCESS: false,
|
||||
};
|
||||
export const CHOKIDAR_USEPOLLING = process.env.CHOKIDAR_USEPOLLING === 'true';
|
||||
export const IS_DOCKER = process.env.IS_DOCKER === 'true';
|
||||
export const DEBUG = process.env.DEBUG === 'true';
|
||||
export const INTROSPECTION = process.env.INTROSPECTION === 'true';
|
||||
export const ENVIRONMENT = process.env.ENVIRONMENT as 'production' | 'staging' | 'development'
|
||||
export const GRAPHQL_INTROSPECTION = Boolean(
|
||||
INTROSPECTION ?? DEBUG ?? ENVIRONMENT !== 'production'
|
||||
);
|
||||
export const ENVIRONMENT = process.env.ENVIRONMENT as 'production' | 'staging' | 'development' ?? 'production';
|
||||
export const GRAPHQL_INTROSPECTION = Boolean(INTROSPECTION ?? DEBUG ?? ENVIRONMENT !== 'production');
|
||||
export const PORT = process.env.PORT ?? '/var/run/unraid-api.sock';
|
||||
export const DRY_RUN = process.env.DRY_RUN === 'true';
|
||||
export const BYPASS_PERMISSION_CHECKS = process.env.BYPASS_PERMISSION_CHECKS === 'true';
|
||||
export const BYPASS_CORS_CHECKS = process.env.BYPASS_CORS_CHECKS === 'true';
|
||||
export const LOG_CORS = process.env.LOG_CORS === 'true';
|
||||
export const LOG_TYPE = process.env.LOG_TYPE as 'pretty' | 'raw' ?? 'pretty';
|
||||
export const LOG_LEVEL = process.env.LOG_LEVEL as 'TRACE' | 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' | 'FATAL';
|
||||
export const LOG_TRANSPORT = process.env.LOG_TRANSPORT as 'file' | 'stdout';
|
||||
export const LOG_TYPE = (process.env.LOG_TYPE as 'pretty' | 'raw') ?? 'pretty';
|
||||
export const LOG_LEVEL = process.env.LOG_LEVEL as
|
||||
| 'TRACE'
|
||||
| 'DEBUG'
|
||||
| 'INFO'
|
||||
| 'WARN'
|
||||
| 'ERROR'
|
||||
| 'FATAL' ?? process.env.ENVIRONMENT === 'production' ? 'INFO' : 'TRACE';
|
||||
export const MOTHERSHIP_GRAPHQL_LINK =
|
||||
process.env.MOTHERSHIP_GRAPHQL_LINK ??
|
||||
(process.env.ENVIRONMENT === 'staging'
|
||||
? 'https://staging.mothership.unraid.net/ws'
|
||||
: 'https://mothership.unraid.net/ws');
|
||||
@@ -3,14 +3,14 @@ import {
|
||||
HttpLink,
|
||||
InMemoryCache,
|
||||
split,
|
||||
} from '@apollo/client/core/core.cjs';
|
||||
import { onError } from '@apollo/client/link/error';
|
||||
} from '@apollo/client/core/index.js';
|
||||
import { onError } from '@apollo/client/link/error/index.js';
|
||||
import { getInternalApiAddress } from '@app/consts';
|
||||
import WebSocket from 'ws';
|
||||
import { fetch } from 'cross-fetch';
|
||||
import { getMainDefinition } from '@apollo/client/utilities';
|
||||
import { getMainDefinition } from '@apollo/client/utilities/index.js';
|
||||
import { graphqlLogger } from '@app/core/log';
|
||||
import { GraphQLWsLink } from '@apollo/client/link/subscriptions';
|
||||
import { GraphQLWsLink } from '@apollo/client/link/subscriptions/index.js';
|
||||
import { createClient } from 'graphql-ws';
|
||||
import { getters } from '@app/store/index';
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
import * as Types from '@app/graphql/generated/api/types';
|
||||
|
||||
import { z } from 'zod'
|
||||
import { AllowedOriginInput, ApiKey, ApiKeyResponse, ArrayType, ArrayCapacity, ArrayDisk, ArrayDiskFsColor, ArrayDiskStatus, ArrayDiskType, ArrayPendingState, ArrayState, Baseboard, Capacity, Case, Cloud, CloudResponse, Config, ConfigErrorState, ConnectSignInInput, ConnectUserInfoInput, ContainerHostConfig, ContainerMount, ContainerPort, ContainerPortType, ContainerState, Devices, Disk, DiskFsType, DiskInterfaceType, DiskPartition, DiskSmartStatus, Display, DockerContainer, DockerNetwork, Flash, Gpu, Importance, Info, InfoApps, InfoCpu, InfoMemory, KeyFile, Me, MemoryFormFactor, MemoryLayout, MemoryType, MinigraphStatus, MinigraphqlResponse, Mount, Network, Notification, NotificationFilter, NotificationInput, NotificationType, Os, Owner, ParityCheck, Partition, Pci, ProfileModel, Registration, RegistrationState, RelayResponse, Server, ServerStatus, Service, SetupRemoteAccessInput, Share, System, Temperature, Theme, UnassignedDevice, Uptime, Usb, User, UserAccount, Vars, Versions, VmDomain, VmState, Vms, WAN_ACCESS_TYPE, WAN_FORWARD_TYPE, Welcome, addApiKeyInput, addUserInput, arrayDiskInput, authenticateInput, deleteUserInput, mdState, registrationType, updateApikeyInput, usersInput } from '@app/graphql/generated/api/types'
|
||||
import { AccessUrl, AccessUrlInput, AllowedOriginInput, ApiKey, ApiKeyResponse, ArrayType, ArrayCapacity, ArrayDisk, ArrayDiskFsColor, ArrayDiskStatus, ArrayDiskType, ArrayPendingState, ArrayState, Baseboard, Capacity, Case, Cloud, CloudResponse, Config, ConfigErrorState, Connect, ConnectSignInInput, ConnectUserInfoInput, ContainerHostConfig, ContainerMount, ContainerPort, ContainerPortType, ContainerState, Devices, Disk, DiskFsType, DiskInterfaceType, DiskPartition, DiskSmartStatus, Display, Docker, DockerContainer, DockerNetwork, DynamicRemoteAccessStatus, DynamicRemoteAccessType, EnableDynamicRemoteAccessInput, Flash, Gpu, Importance, Info, InfoApps, InfoCpu, InfoMemory, KeyFile, Me, MemoryFormFactor, MemoryLayout, MemoryType, MinigraphStatus, MinigraphqlResponse, Mount, Network, Node, Notification, NotificationCounts, NotificationData, NotificationFilter, NotificationOverview, NotificationType, Notifications, NotificationslistArgs, Os, Owner, ParityCheck, Partition, Pci, ProfileModel, Registration, RegistrationState, RelayResponse, RemoteAccess, Server, ServerStatus, Service, SetupRemoteAccessInput, Share, System, Temperature, Theme, URL_TYPE, UnassignedDevice, Uptime, Usb, User, UserAccount, Vars, Versions, VmDomain, VmState, Vms, WAN_ACCESS_TYPE, WAN_FORWARD_TYPE, Welcome, addApiKeyInput, addUserInput, arrayDiskInput, authenticateInput, deleteUserInput, mdState, registrationType, updateApikeyInput, usersInput } from '@app/graphql/generated/api/types'
|
||||
import { TypedDocumentNode as DocumentNode } from '@graphql-typed-document-node/core';
|
||||
|
||||
type Properties<T> = Required<{
|
||||
@@ -37,6 +37,8 @@ export const DiskInterfaceTypeSchema = z.nativeEnum(DiskInterfaceType);
|
||||
|
||||
export const DiskSmartStatusSchema = z.nativeEnum(DiskSmartStatus);
|
||||
|
||||
export const DynamicRemoteAccessTypeSchema = z.nativeEnum(DynamicRemoteAccessType);
|
||||
|
||||
export const ImportanceSchema = z.nativeEnum(Importance);
|
||||
|
||||
export const MemoryFormFactorSchema = z.nativeEnum(MemoryFormFactor);
|
||||
@@ -55,6 +57,8 @@ export const TemperatureSchema = z.nativeEnum(Temperature);
|
||||
|
||||
export const ThemeSchema = z.nativeEnum(Theme);
|
||||
|
||||
export const URL_TYPESchema = z.nativeEnum(URL_TYPE);
|
||||
|
||||
export const VmStateSchema = z.nativeEnum(VmState);
|
||||
|
||||
export const WAN_ACCESS_TYPESchema = z.nativeEnum(WAN_ACCESS_TYPE);
|
||||
@@ -65,6 +69,25 @@ export const mdStateSchema = z.nativeEnum(mdState);
|
||||
|
||||
export const registrationTypeSchema = z.nativeEnum(registrationType);
|
||||
|
||||
export function AccessUrlSchema(): z.ZodObject<Properties<AccessUrl>> {
|
||||
return z.object({
|
||||
__typename: z.literal('AccessUrl').optional(),
|
||||
ipv4: definedNonNullAnySchema.nullish(),
|
||||
ipv6: definedNonNullAnySchema.nullish(),
|
||||
name: z.string().nullish(),
|
||||
type: URL_TYPESchema
|
||||
})
|
||||
}
|
||||
|
||||
export function AccessUrlInputSchema(): z.ZodObject<Properties<AccessUrlInput>> {
|
||||
return z.object({
|
||||
ipv4: definedNonNullAnySchema.nullish(),
|
||||
ipv6: definedNonNullAnySchema.nullish(),
|
||||
name: z.string().nullish(),
|
||||
type: URL_TYPESchema
|
||||
})
|
||||
}
|
||||
|
||||
export function AllowedOriginInputSchema(): z.ZodObject<Properties<AllowedOriginInput>> {
|
||||
return z.object({
|
||||
origins: z.array(z.string())
|
||||
@@ -97,6 +120,7 @@ export function ArrayTypeSchema(): z.ZodObject<Properties<ArrayType>> {
|
||||
caches: z.array(ArrayDiskSchema()),
|
||||
capacity: ArrayCapacitySchema(),
|
||||
disks: z.array(ArrayDiskSchema()),
|
||||
id: z.string(),
|
||||
parities: z.array(ArrayDiskSchema()),
|
||||
pendingState: ArrayPendingStateSchema.nullish(),
|
||||
previousState: ArrayStateSchema.nullish(),
|
||||
@@ -195,10 +219,19 @@ export function ConfigSchema(): z.ZodObject<Properties<Config>> {
|
||||
return z.object({
|
||||
__typename: z.literal('Config').optional(),
|
||||
error: ConfigErrorStateSchema.nullish(),
|
||||
id: z.string(),
|
||||
valid: z.boolean().nullish()
|
||||
})
|
||||
}
|
||||
|
||||
export function ConnectSchema(): z.ZodObject<Properties<Connect>> {
|
||||
return z.object({
|
||||
__typename: z.literal('Connect').optional(),
|
||||
dynamicRemoteAccess: DynamicRemoteAccessStatusSchema(),
|
||||
id: z.string()
|
||||
})
|
||||
}
|
||||
|
||||
export function ConnectSignInInputSchema(): z.ZodObject<Properties<ConnectSignInInput>> {
|
||||
return z.object({
|
||||
accessToken: z.string().nullish(),
|
||||
@@ -300,6 +333,7 @@ export function DisplaySchema(): z.ZodObject<Properties<Display>> {
|
||||
dashapps: z.string().nullish(),
|
||||
date: z.string().nullish(),
|
||||
hot: z.number().nullish(),
|
||||
id: z.string(),
|
||||
locale: z.string().nullish(),
|
||||
max: z.number().nullish(),
|
||||
number: z.string().nullish(),
|
||||
@@ -317,6 +351,15 @@ export function DisplaySchema(): z.ZodObject<Properties<Display>> {
|
||||
})
|
||||
}
|
||||
|
||||
export function DockerSchema(): z.ZodObject<Properties<Docker>> {
|
||||
return z.object({
|
||||
__typename: z.literal('Docker').optional(),
|
||||
containers: z.array(DockerContainerSchema()).nullish(),
|
||||
id: z.string(),
|
||||
networks: z.array(DockerNetworkSchema()).nullish()
|
||||
})
|
||||
}
|
||||
|
||||
export function DockerContainerSchema(): z.ZodObject<Properties<DockerContainer>> {
|
||||
return z.object({
|
||||
__typename: z.literal('DockerContainer').optional(),
|
||||
@@ -359,6 +402,22 @@ export function DockerNetworkSchema(): z.ZodObject<Properties<DockerNetwork>> {
|
||||
})
|
||||
}
|
||||
|
||||
export function DynamicRemoteAccessStatusSchema(): z.ZodObject<Properties<DynamicRemoteAccessStatus>> {
|
||||
return z.object({
|
||||
__typename: z.literal('DynamicRemoteAccessStatus').optional(),
|
||||
enabledType: DynamicRemoteAccessTypeSchema,
|
||||
error: z.string().nullish(),
|
||||
runningType: DynamicRemoteAccessTypeSchema
|
||||
})
|
||||
}
|
||||
|
||||
export function EnableDynamicRemoteAccessInputSchema(): z.ZodObject<Properties<EnableDynamicRemoteAccessInput>> {
|
||||
return z.object({
|
||||
enabled: z.boolean(),
|
||||
url: z.lazy(() => AccessUrlInputSchema())
|
||||
})
|
||||
}
|
||||
|
||||
export function FlashSchema(): z.ZodObject<Properties<Flash>> {
|
||||
return z.object({
|
||||
__typename: z.literal('Flash').optional(),
|
||||
@@ -389,10 +448,12 @@ export function InfoSchema(): z.ZodObject<Properties<Info>> {
|
||||
cpu: InfoCpuSchema().nullish(),
|
||||
devices: DevicesSchema().nullish(),
|
||||
display: DisplaySchema().nullish(),
|
||||
id: z.string(),
|
||||
machineId: z.string().nullish(),
|
||||
memory: InfoMemorySchema().nullish(),
|
||||
os: OsSchema().nullish(),
|
||||
system: SystemSchema().nullish(),
|
||||
time: z.string(),
|
||||
versions: VersionsSchema().nullish()
|
||||
})
|
||||
}
|
||||
@@ -503,8 +564,10 @@ export function MountSchema(): z.ZodObject<Properties<Mount>> {
|
||||
export function NetworkSchema(): z.ZodObject<Properties<Network>> {
|
||||
return z.object({
|
||||
__typename: z.literal('Network').optional(),
|
||||
accessUrls: z.array(AccessUrlSchema()).nullish(),
|
||||
carrierChanges: z.string().nullish(),
|
||||
duplex: z.string().nullish(),
|
||||
id: z.string(),
|
||||
iface: z.string().nullish(),
|
||||
ifaceName: z.string().nullish(),
|
||||
internal: z.string().nullish(),
|
||||
@@ -518,10 +581,17 @@ export function NetworkSchema(): z.ZodObject<Properties<Network>> {
|
||||
})
|
||||
}
|
||||
|
||||
export function NodeSchema(): z.ZodObject<Properties<Node>> {
|
||||
return z.object({
|
||||
id: z.string()
|
||||
})
|
||||
}
|
||||
|
||||
export function NotificationSchema(): z.ZodObject<Properties<Notification>> {
|
||||
return z.object({
|
||||
__typename: z.literal('Notification').optional(),
|
||||
description: z.string(),
|
||||
formattedTimestamp: z.string().nullish(),
|
||||
id: z.string(),
|
||||
importance: ImportanceSchema,
|
||||
link: z.string().nullish(),
|
||||
@@ -532,6 +602,26 @@ export function NotificationSchema(): z.ZodObject<Properties<Notification>> {
|
||||
})
|
||||
}
|
||||
|
||||
export function NotificationCountsSchema(): z.ZodObject<Properties<NotificationCounts>> {
|
||||
return z.object({
|
||||
__typename: z.literal('NotificationCounts').optional(),
|
||||
alert: z.number(),
|
||||
info: z.number(),
|
||||
total: z.number(),
|
||||
warning: z.number()
|
||||
})
|
||||
}
|
||||
|
||||
export function NotificationDataSchema(): z.ZodObject<Properties<NotificationData>> {
|
||||
return z.object({
|
||||
description: z.string(),
|
||||
importance: ImportanceSchema,
|
||||
link: z.string().nullish(),
|
||||
subject: z.string(),
|
||||
title: z.string()
|
||||
})
|
||||
}
|
||||
|
||||
export function NotificationFilterSchema(): z.ZodObject<Properties<NotificationFilter>> {
|
||||
return z.object({
|
||||
importance: ImportanceSchema.nullish(),
|
||||
@@ -541,16 +631,26 @@ export function NotificationFilterSchema(): z.ZodObject<Properties<NotificationF
|
||||
})
|
||||
}
|
||||
|
||||
export function NotificationInputSchema(): z.ZodObject<Properties<NotificationInput>> {
|
||||
export function NotificationOverviewSchema(): z.ZodObject<Properties<NotificationOverview>> {
|
||||
return z.object({
|
||||
description: z.string().nullish(),
|
||||
__typename: z.literal('NotificationOverview').optional(),
|
||||
archive: NotificationCountsSchema(),
|
||||
unread: NotificationCountsSchema()
|
||||
})
|
||||
}
|
||||
|
||||
export function NotificationsSchema(): z.ZodObject<Properties<Notifications>> {
|
||||
return z.object({
|
||||
__typename: z.literal('Notifications').optional(),
|
||||
id: z.string(),
|
||||
importance: ImportanceSchema,
|
||||
link: z.string().nullish(),
|
||||
subject: z.string(),
|
||||
timestamp: z.string().nullish(),
|
||||
title: z.string(),
|
||||
type: NotificationTypeSchema
|
||||
list: z.array(NotificationSchema()),
|
||||
overview: NotificationOverviewSchema()
|
||||
})
|
||||
}
|
||||
|
||||
export function NotificationslistArgsSchema(): z.ZodObject<Properties<NotificationslistArgs>> {
|
||||
return z.object({
|
||||
filter: NotificationFilterSchema()
|
||||
})
|
||||
}
|
||||
|
||||
@@ -702,6 +802,15 @@ export function RelayResponseSchema(): z.ZodObject<Properties<RelayResponse>> {
|
||||
})
|
||||
}
|
||||
|
||||
export function RemoteAccessSchema(): z.ZodObject<Properties<RemoteAccess>> {
|
||||
return z.object({
|
||||
__typename: z.literal('RemoteAccess').optional(),
|
||||
accessType: WAN_ACCESS_TYPESchema,
|
||||
forwardType: WAN_FORWARD_TYPESchema.nullish(),
|
||||
port: z.number().nullish()
|
||||
})
|
||||
}
|
||||
|
||||
export function ServerSchema(): z.ZodObject<Properties<Server>> {
|
||||
return z.object({
|
||||
__typename: z.literal('Server').optional(),
|
||||
@@ -720,6 +829,7 @@ export function ServerSchema(): z.ZodObject<Properties<Server>> {
|
||||
export function ServiceSchema(): z.ZodObject<Properties<Service>> {
|
||||
return z.object({
|
||||
__typename: z.literal('Service').optional(),
|
||||
id: z.string(),
|
||||
name: z.string().nullish(),
|
||||
online: z.boolean().nullish(),
|
||||
uptime: UptimeSchema().nullish(),
|
||||
@@ -894,6 +1004,7 @@ export function VarsSchema(): z.ZodObject<Properties<Vars>> {
|
||||
fuseRememberDefault: z.string().nullish(),
|
||||
fuseRememberStatus: z.string().nullish(),
|
||||
hideDotFiles: z.boolean().nullish(),
|
||||
id: z.string(),
|
||||
joinStatus: z.string().nullish(),
|
||||
localMaster: z.boolean().nullish(),
|
||||
localTld: z.string().nullish(),
|
||||
|
||||
@@ -20,9 +20,25 @@ export type Scalars = {
|
||||
JSON: { input: { [key: string]: any }; output: { [key: string]: any }; }
|
||||
Long: { input: number; output: number; }
|
||||
Port: { input: number; output: number; }
|
||||
URL: { input: URL; output: URL; }
|
||||
UUID: { input: string; output: string; }
|
||||
};
|
||||
|
||||
export type AccessUrl = {
|
||||
__typename?: 'AccessUrl';
|
||||
ipv4?: Maybe<Scalars['URL']['output']>;
|
||||
ipv6?: Maybe<Scalars['URL']['output']>;
|
||||
name?: Maybe<Scalars['String']['output']>;
|
||||
type: URL_TYPE;
|
||||
};
|
||||
|
||||
export type AccessUrlInput = {
|
||||
ipv4?: InputMaybe<Scalars['URL']['input']>;
|
||||
ipv6?: InputMaybe<Scalars['URL']['input']>;
|
||||
name?: InputMaybe<Scalars['String']['input']>;
|
||||
type: URL_TYPE;
|
||||
};
|
||||
|
||||
export type AllowedOriginInput = {
|
||||
origins: Array<Scalars['String']['input']>;
|
||||
};
|
||||
@@ -42,7 +58,7 @@ export type ApiKeyResponse = {
|
||||
valid: Scalars['Boolean']['output'];
|
||||
};
|
||||
|
||||
export type ArrayType = {
|
||||
export type ArrayType = Node & {
|
||||
__typename?: 'Array';
|
||||
/** Current boot disk */
|
||||
boot?: Maybe<ArrayDisk>;
|
||||
@@ -52,6 +68,7 @@ export type ArrayType = {
|
||||
capacity: ArrayCapacity;
|
||||
/** Data disks in the current array */
|
||||
disks: Array<ArrayDisk>;
|
||||
id: Scalars['ID']['output'];
|
||||
/** Parity disks in the current array */
|
||||
parities: Array<ArrayDisk>;
|
||||
/** Array state after this query/mutation */
|
||||
@@ -232,9 +249,10 @@ export type CloudResponse = {
|
||||
status: Scalars['String']['output'];
|
||||
};
|
||||
|
||||
export type Config = {
|
||||
export type Config = Node & {
|
||||
__typename?: 'Config';
|
||||
error?: Maybe<ConfigErrorState>;
|
||||
id: Scalars['ID']['output'];
|
||||
valid?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
@@ -245,6 +263,12 @@ export enum ConfigErrorState {
|
||||
WITHDRAWN = 'WITHDRAWN'
|
||||
}
|
||||
|
||||
export type Connect = Node & {
|
||||
__typename?: 'Connect';
|
||||
dynamicRemoteAccess: DynamicRemoteAccessStatus;
|
||||
id: Scalars['ID']['output'];
|
||||
};
|
||||
|
||||
export type ConnectSignInInput = {
|
||||
accessToken?: InputMaybe<Scalars['String']['input']>;
|
||||
apiKey: Scalars['String']['input'];
|
||||
@@ -359,6 +383,7 @@ export type Display = {
|
||||
dashapps?: Maybe<Scalars['String']['output']>;
|
||||
date?: Maybe<Scalars['String']['output']>;
|
||||
hot?: Maybe<Scalars['Int']['output']>;
|
||||
id: Scalars['ID']['output'];
|
||||
locale?: Maybe<Scalars['String']['output']>;
|
||||
max?: Maybe<Scalars['Int']['output']>;
|
||||
number?: Maybe<Scalars['String']['output']>;
|
||||
@@ -375,6 +400,13 @@ export type Display = {
|
||||
wwn?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export type Docker = Node & {
|
||||
__typename?: 'Docker';
|
||||
containers?: Maybe<Array<DockerContainer>>;
|
||||
id: Scalars['ID']['output'];
|
||||
networks?: Maybe<Array<DockerNetwork>>;
|
||||
};
|
||||
|
||||
export type DockerContainer = {
|
||||
__typename?: 'DockerContainer';
|
||||
autoStart: Scalars['Boolean']['output'];
|
||||
@@ -414,6 +446,24 @@ export type DockerNetwork = {
|
||||
scope?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type DynamicRemoteAccessStatus = {
|
||||
__typename?: 'DynamicRemoteAccessStatus';
|
||||
enabledType: DynamicRemoteAccessType;
|
||||
error?: Maybe<Scalars['String']['output']>;
|
||||
runningType: DynamicRemoteAccessType;
|
||||
};
|
||||
|
||||
export enum DynamicRemoteAccessType {
|
||||
DISABLED = 'DISABLED',
|
||||
STATIC = 'STATIC',
|
||||
UPNP = 'UPNP'
|
||||
}
|
||||
|
||||
export type EnableDynamicRemoteAccessInput = {
|
||||
enabled: Scalars['Boolean']['input'];
|
||||
url: AccessUrlInput;
|
||||
};
|
||||
|
||||
export type Flash = {
|
||||
__typename?: 'Flash';
|
||||
guid?: Maybe<Scalars['String']['output']>;
|
||||
@@ -438,7 +488,7 @@ export enum Importance {
|
||||
WARNING = 'WARNING'
|
||||
}
|
||||
|
||||
export type Info = {
|
||||
export type Info = Node & {
|
||||
__typename?: 'Info';
|
||||
/** Count of docker containers */
|
||||
apps?: Maybe<InfoApps>;
|
||||
@@ -446,11 +496,13 @@ export type Info = {
|
||||
cpu?: Maybe<InfoCpu>;
|
||||
devices?: Maybe<Devices>;
|
||||
display?: Maybe<Display>;
|
||||
id: Scalars['ID']['output'];
|
||||
/** Machine ID */
|
||||
machineId?: Maybe<Scalars['ID']['output']>;
|
||||
memory?: Maybe<InfoMemory>;
|
||||
os?: Maybe<Os>;
|
||||
system?: Maybe<System>;
|
||||
time: Scalars['DateTime']['output'];
|
||||
versions?: Maybe<Versions>;
|
||||
};
|
||||
|
||||
@@ -570,13 +622,21 @@ export type Mutation = {
|
||||
addDiskToArray?: Maybe<ArrayType>;
|
||||
/** Add a new user */
|
||||
addUser?: Maybe<User>;
|
||||
archiveAll: NotificationOverview;
|
||||
/** Marks a notification as archived. */
|
||||
archiveNotification: Notification;
|
||||
archiveNotifications: NotificationOverview;
|
||||
/** Cancel parity check */
|
||||
cancelParityCheck?: Maybe<Scalars['JSON']['output']>;
|
||||
clearArrayDiskStatistics?: Maybe<Scalars['JSON']['output']>;
|
||||
connectSignIn: Scalars['Boolean']['output'];
|
||||
connectSignOut: Scalars['Boolean']['output'];
|
||||
createNotification: Notification;
|
||||
deleteAllNotifications: NotificationOverview;
|
||||
deleteNotification: NotificationOverview;
|
||||
/** Delete a user */
|
||||
deleteUser?: Maybe<User>;
|
||||
enableDynamicRemoteAccess: Scalars['Boolean']['output'];
|
||||
/** Get an existing API key */
|
||||
getApiKey?: Maybe<ApiKey>;
|
||||
login?: Maybe<Scalars['String']['output']>;
|
||||
@@ -584,11 +644,12 @@ export type Mutation = {
|
||||
/** Pause parity check */
|
||||
pauseParityCheck?: Maybe<Scalars['JSON']['output']>;
|
||||
reboot?: Maybe<Scalars['String']['output']>;
|
||||
/** Reads each notification to recompute & update the overview. */
|
||||
recalculateOverview: NotificationOverview;
|
||||
/** Remove existing disk from array. NOTE: The array must be stopped before running this otherwise it'll throw an error. */
|
||||
removeDiskFromArray?: Maybe<ArrayType>;
|
||||
/** Resume parity check */
|
||||
resumeParityCheck?: Maybe<Scalars['JSON']['output']>;
|
||||
sendNotification?: Maybe<Notification>;
|
||||
setAdditionalAllowedOrigins: Array<Scalars['String']['output']>;
|
||||
setupRemoteAccess: Scalars['Boolean']['output'];
|
||||
shutdown?: Maybe<Scalars['String']['output']>;
|
||||
@@ -598,7 +659,11 @@ export type Mutation = {
|
||||
startParityCheck?: Maybe<Scalars['JSON']['output']>;
|
||||
/** Stop array */
|
||||
stopArray?: Maybe<ArrayType>;
|
||||
unarchiveAll: NotificationOverview;
|
||||
unarchiveNotifications: NotificationOverview;
|
||||
unmountArrayDisk?: Maybe<Disk>;
|
||||
/** Marks a notification as unread. */
|
||||
unreadNotification: Notification;
|
||||
/** Update an existing API key */
|
||||
updateApikey?: Maybe<ApiKey>;
|
||||
};
|
||||
@@ -620,6 +685,21 @@ export type MutationaddUserArgs = {
|
||||
};
|
||||
|
||||
|
||||
export type MutationarchiveAllArgs = {
|
||||
importance?: InputMaybe<Importance>;
|
||||
};
|
||||
|
||||
|
||||
export type MutationarchiveNotificationArgs = {
|
||||
id: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
|
||||
export type MutationarchiveNotificationsArgs = {
|
||||
ids?: InputMaybe<Array<Scalars['String']['input']>>;
|
||||
};
|
||||
|
||||
|
||||
export type MutationclearArrayDiskStatisticsArgs = {
|
||||
id: Scalars['ID']['input'];
|
||||
};
|
||||
@@ -630,11 +710,27 @@ export type MutationconnectSignInArgs = {
|
||||
};
|
||||
|
||||
|
||||
export type MutationcreateNotificationArgs = {
|
||||
input: NotificationData;
|
||||
};
|
||||
|
||||
|
||||
export type MutationdeleteNotificationArgs = {
|
||||
id: Scalars['String']['input'];
|
||||
type: NotificationType;
|
||||
};
|
||||
|
||||
|
||||
export type MutationdeleteUserArgs = {
|
||||
input: deleteUserInput;
|
||||
};
|
||||
|
||||
|
||||
export type MutationenableDynamicRemoteAccessArgs = {
|
||||
input: EnableDynamicRemoteAccessInput;
|
||||
};
|
||||
|
||||
|
||||
export type MutationgetApiKeyArgs = {
|
||||
input?: InputMaybe<authenticateInput>;
|
||||
name: Scalars['String']['input'];
|
||||
@@ -657,11 +753,6 @@ export type MutationremoveDiskFromArrayArgs = {
|
||||
};
|
||||
|
||||
|
||||
export type MutationsendNotificationArgs = {
|
||||
notification: NotificationInput;
|
||||
};
|
||||
|
||||
|
||||
export type MutationsetAdditionalAllowedOriginsArgs = {
|
||||
input: AllowedOriginInput;
|
||||
};
|
||||
@@ -677,20 +768,37 @@ export type MutationstartParityCheckArgs = {
|
||||
};
|
||||
|
||||
|
||||
export type MutationunarchiveAllArgs = {
|
||||
importance?: InputMaybe<Importance>;
|
||||
};
|
||||
|
||||
|
||||
export type MutationunarchiveNotificationsArgs = {
|
||||
ids?: InputMaybe<Array<Scalars['String']['input']>>;
|
||||
};
|
||||
|
||||
|
||||
export type MutationunmountArrayDiskArgs = {
|
||||
id: Scalars['ID']['input'];
|
||||
};
|
||||
|
||||
|
||||
export type MutationunreadNotificationArgs = {
|
||||
id: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
|
||||
export type MutationupdateApikeyArgs = {
|
||||
input?: InputMaybe<updateApikeyInput>;
|
||||
name: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
export type Network = {
|
||||
export type Network = Node & {
|
||||
__typename?: 'Network';
|
||||
accessUrls?: Maybe<Array<AccessUrl>>;
|
||||
carrierChanges?: Maybe<Scalars['String']['output']>;
|
||||
duplex?: Maybe<Scalars['String']['output']>;
|
||||
id: Scalars['ID']['output'];
|
||||
iface?: Maybe<Scalars['String']['output']>;
|
||||
ifaceName?: Maybe<Scalars['String']['output']>;
|
||||
internal?: Maybe<Scalars['String']['output']>;
|
||||
@@ -703,19 +811,41 @@ export type Network = {
|
||||
type?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type Notification = {
|
||||
export type Node = {
|
||||
id: Scalars['ID']['output'];
|
||||
};
|
||||
|
||||
export type Notification = Node & {
|
||||
__typename?: 'Notification';
|
||||
description: Scalars['String']['output'];
|
||||
formattedTimestamp?: Maybe<Scalars['String']['output']>;
|
||||
id: Scalars['ID']['output'];
|
||||
importance: Importance;
|
||||
link?: Maybe<Scalars['String']['output']>;
|
||||
subject: Scalars['String']['output'];
|
||||
  /** ISO Timestamp for when the notification occurred */
timestamp?: Maybe<Scalars['String']['output']>;
|
||||
/** Also known as 'event' */
|
||||
title: Scalars['String']['output'];
|
||||
type: NotificationType;
|
||||
};
|
||||
|
||||
export type NotificationCounts = {
|
||||
__typename?: 'NotificationCounts';
|
||||
alert: Scalars['Int']['output'];
|
||||
info: Scalars['Int']['output'];
|
||||
total: Scalars['Int']['output'];
|
||||
warning: Scalars['Int']['output'];
|
||||
};
|
||||
|
||||
export type NotificationData = {
|
||||
description: Scalars['String']['input'];
|
||||
importance: Importance;
|
||||
link?: InputMaybe<Scalars['String']['input']>;
|
||||
subject: Scalars['String']['input'];
|
||||
title: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
export type NotificationFilter = {
|
||||
importance?: InputMaybe<Importance>;
|
||||
limit: Scalars['Int']['input'];
|
||||
@@ -723,23 +853,30 @@ export type NotificationFilter = {
|
||||
type?: InputMaybe<NotificationType>;
|
||||
};
|
||||
|
||||
export type NotificationInput = {
|
||||
description?: InputMaybe<Scalars['String']['input']>;
|
||||
id: Scalars['ID']['input'];
|
||||
importance: Importance;
|
||||
link?: InputMaybe<Scalars['String']['input']>;
|
||||
subject: Scalars['String']['input'];
|
||||
timestamp?: InputMaybe<Scalars['String']['input']>;
|
||||
title: Scalars['String']['input'];
|
||||
type: NotificationType;
|
||||
export type NotificationOverview = {
|
||||
__typename?: 'NotificationOverview';
|
||||
archive: NotificationCounts;
|
||||
unread: NotificationCounts;
|
||||
};
|
||||
|
||||
export enum NotificationType {
|
||||
ARCHIVED = 'ARCHIVED',
|
||||
RESTORED = 'RESTORED',
|
||||
ARCHIVE = 'ARCHIVE',
|
||||
UNREAD = 'UNREAD'
|
||||
}
|
||||
|
||||
export type Notifications = Node & {
|
||||
__typename?: 'Notifications';
|
||||
id: Scalars['ID']['output'];
|
||||
list: Array<Notification>;
|
||||
/** A cached overview of the notifications in the system & their severity. */
|
||||
overview: NotificationOverview;
|
||||
};
|
||||
|
||||
|
||||
export type NotificationslistArgs = {
|
||||
filter: NotificationFilter;
|
||||
};
|
||||
|
||||
export type Os = {
|
||||
__typename?: 'Os';
|
||||
arch?: Maybe<Scalars['String']['output']>;
|
||||
@@ -863,28 +1000,34 @@ export type Query = {
|
||||
array: ArrayType;
|
||||
cloud?: Maybe<Cloud>;
|
||||
config: Config;
|
||||
connect: Connect;
|
||||
/** Single disk */
|
||||
disk?: Maybe<Disk>;
|
||||
/** Mulitiple disks */
|
||||
disks: Array<Maybe<Disk>>;
|
||||
display?: Maybe<Display>;
|
||||
docker: Docker;
|
||||
/** All Docker containers */
|
||||
dockerContainers: Array<DockerContainer>;
|
||||
/** Docker network */
|
||||
dockerNetwork: DockerNetwork;
|
||||
/** All Docker networks */
|
||||
dockerNetworks: Array<Maybe<DockerNetwork>>;
|
||||
extraAllowedOrigins: Array<Scalars['String']['output']>;
|
||||
flash?: Maybe<Flash>;
|
||||
info?: Maybe<Info>;
|
||||
/** Current user account */
|
||||
me?: Maybe<Me>;
|
||||
notifications: Array<Notification>;
|
||||
network?: Maybe<Network>;
|
||||
notifications: Notifications;
|
||||
online?: Maybe<Scalars['Boolean']['output']>;
|
||||
owner?: Maybe<Owner>;
|
||||
parityHistory?: Maybe<Array<Maybe<ParityCheck>>>;
|
||||
registration?: Maybe<Registration>;
|
||||
remoteAccess: RemoteAccess;
|
||||
server?: Maybe<Server>;
|
||||
servers: Array<Server>;
|
||||
services: Array<Service>;
|
||||
/** Network Shares */
|
||||
shares?: Maybe<Array<Maybe<Share>>>;
|
||||
unassignedDevices?: Maybe<Array<Maybe<UnassignedDevice>>>;
|
||||
@@ -918,11 +1061,6 @@ export type QuerydockerNetworksArgs = {
|
||||
};
|
||||
|
||||
|
||||
export type QuerynotificationsArgs = {
|
||||
filter: NotificationFilter;
|
||||
};
|
||||
|
||||
|
||||
export type QueryuserArgs = {
|
||||
id: Scalars['ID']['input'];
|
||||
};
|
||||
@@ -990,6 +1128,13 @@ export type RelayResponse = {
|
||||
timeout?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type RemoteAccess = {
|
||||
__typename?: 'RemoteAccess';
|
||||
accessType: WAN_ACCESS_TYPE;
|
||||
forwardType?: Maybe<WAN_FORWARD_TYPE>;
|
||||
port?: Maybe<Scalars['Port']['output']>;
|
||||
};
|
||||
|
||||
export type Server = {
|
||||
__typename?: 'Server';
|
||||
apikey: Scalars['String']['output'];
|
||||
@@ -1009,8 +1154,9 @@ export enum ServerStatus {
|
||||
ONLINE = 'online'
|
||||
}
|
||||
|
||||
export type Service = {
|
||||
export type Service = Node & {
|
||||
__typename?: 'Service';
|
||||
id: Scalars['ID']['output'];
|
||||
name?: Maybe<Scalars['String']['output']>;
|
||||
online?: Maybe<Scalars['Boolean']['output']>;
|
||||
uptime?: Maybe<Uptime>;
|
||||
@@ -1064,6 +1210,7 @@ export type Subscription = {
|
||||
info: Info;
|
||||
me?: Maybe<Me>;
|
||||
notificationAdded: Notification;
|
||||
notificationsOverview: NotificationOverview;
|
||||
online: Scalars['Boolean']['output'];
|
||||
owner: Owner;
|
||||
parityHistory: ParityCheck;
|
||||
@@ -1124,6 +1271,15 @@ export enum Theme {
|
||||
WHITE = 'white'
|
||||
}
|
||||
|
||||
export enum URL_TYPE {
|
||||
DEFAULT = 'DEFAULT',
|
||||
LAN = 'LAN',
|
||||
MDNS = 'MDNS',
|
||||
OTHER = 'OTHER',
|
||||
WAN = 'WAN',
|
||||
WIREGUARD = 'WIREGUARD'
|
||||
}
|
||||
|
||||
export type UnassignedDevice = {
|
||||
__typename?: 'UnassignedDevice';
|
||||
devlinks?: Maybe<Scalars['String']['output']>;
|
||||
@@ -1210,7 +1366,7 @@ export type UserAccount = {
|
||||
roles: Scalars['String']['output'];
|
||||
};
|
||||
|
||||
export type Vars = {
|
||||
export type Vars = Node & {
|
||||
__typename?: 'Vars';
|
||||
bindMgt?: Maybe<Scalars['Boolean']['output']>;
|
||||
cacheNumDevices?: Maybe<Scalars['Int']['output']>;
|
||||
@@ -1244,6 +1400,7 @@ export type Vars = {
|
||||
fuseRememberDefault?: Maybe<Scalars['String']['output']>;
|
||||
fuseRememberStatus?: Maybe<Scalars['String']['output']>;
|
||||
hideDotFiles?: Maybe<Scalars['Boolean']['output']>;
|
||||
id: Scalars['ID']['output'];
|
||||
joinStatus?: Maybe<Scalars['String']['output']>;
|
||||
localMaster?: Maybe<Scalars['Boolean']['output']>;
|
||||
localTld?: Maybe<Scalars['String']['output']>;
|
||||
@@ -1567,12 +1724,15 @@ export type DirectiveResolverFn<TResult = {}, TParent = {}, TContext = {}, TArgs

/** Mapping of interface types */
export type ResolversInterfaceTypes<RefType extends Record<string, unknown>> = ResolversObject<{
export type ResolversInterfaceTypes<_RefType extends Record<string, unknown>> = ResolversObject<{
  Node: ( ArrayType ) | ( Config ) | ( Connect ) | ( Docker ) | ( Info ) | ( Network ) | ( Notification ) | ( Notifications ) | ( Service ) | ( Vars );
  UserAccount: ( Me ) | ( User );
}>;

/** Mapping between all available schema types and the resolvers types */
export type ResolversTypes = ResolversObject<{
  AccessUrl: ResolverTypeWrapper<AccessUrl>;
  AccessUrlInput: AccessUrlInput;
  AllowedOriginInput: AllowedOriginInput;
  ApiKey: ResolverTypeWrapper<ApiKey>;
  ApiKeyResponse: ResolverTypeWrapper<ApiKeyResponse>;
@@ -1592,6 +1752,7 @@ export type ResolversTypes = ResolversObject<{
  CloudResponse: ResolverTypeWrapper<CloudResponse>;
  Config: ResolverTypeWrapper<Config>;
  ConfigErrorState: ConfigErrorState;
  Connect: ResolverTypeWrapper<Connect>;
  ConnectSignInInput: ConnectSignInInput;
  ConnectUserInfoInput: ConnectUserInfoInput;
  ContainerHostConfig: ResolverTypeWrapper<ContainerHostConfig>;
@@ -1607,8 +1768,12 @@ export type ResolversTypes = ResolversObject<{
  DiskPartition: ResolverTypeWrapper<DiskPartition>;
  DiskSmartStatus: DiskSmartStatus;
  Display: ResolverTypeWrapper<Display>;
  Docker: ResolverTypeWrapper<Docker>;
  DockerContainer: ResolverTypeWrapper<DockerContainer>;
  DockerNetwork: ResolverTypeWrapper<DockerNetwork>;
  DynamicRemoteAccessStatus: ResolverTypeWrapper<DynamicRemoteAccessStatus>;
  DynamicRemoteAccessType: DynamicRemoteAccessType;
  EnableDynamicRemoteAccessInput: EnableDynamicRemoteAccessInput;
  Flash: ResolverTypeWrapper<Flash>;
  Float: ResolverTypeWrapper<Scalars['Float']['output']>;
  Gpu: ResolverTypeWrapper<Gpu>;
@@ -1631,10 +1796,14 @@ export type ResolversTypes = ResolversObject<{
  Mount: ResolverTypeWrapper<Mount>;
  Mutation: ResolverTypeWrapper<{}>;
  Network: ResolverTypeWrapper<Network>;
  Node: ResolverTypeWrapper<ResolversInterfaceTypes<ResolversTypes>['Node']>;
  Notification: ResolverTypeWrapper<Notification>;
  NotificationCounts: ResolverTypeWrapper<NotificationCounts>;
  NotificationData: NotificationData;
  NotificationFilter: NotificationFilter;
  NotificationInput: NotificationInput;
  NotificationOverview: ResolverTypeWrapper<NotificationOverview>;
  NotificationType: NotificationType;
  Notifications: ResolverTypeWrapper<Notifications>;
  Os: ResolverTypeWrapper<Os>;
  Owner: ResolverTypeWrapper<Owner>;
  ParityCheck: ResolverTypeWrapper<ParityCheck>;
@@ -1646,6 +1815,7 @@ export type ResolversTypes = ResolversObject<{
  Registration: ResolverTypeWrapper<Registration>;
  RegistrationState: RegistrationState;
  RelayResponse: ResolverTypeWrapper<RelayResponse>;
  RemoteAccess: ResolverTypeWrapper<RemoteAccess>;
  Server: ResolverTypeWrapper<Server>;
  ServerStatus: ServerStatus;
  Service: ResolverTypeWrapper<Service>;
@@ -1656,6 +1826,8 @@ export type ResolversTypes = ResolversObject<{
  System: ResolverTypeWrapper<System>;
  Temperature: Temperature;
  Theme: Theme;
  URL: ResolverTypeWrapper<Scalars['URL']['output']>;
  URL_TYPE: URL_TYPE;
  UUID: ResolverTypeWrapper<Scalars['UUID']['output']>;
  UnassignedDevice: ResolverTypeWrapper<UnassignedDevice>;
  Uptime: ResolverTypeWrapper<Uptime>;
@@ -1683,6 +1855,8 @@ export type ResolversTypes = ResolversObject<{

/** Mapping between all available schema types and the resolvers parents */
export type ResolversParentTypes = ResolversObject<{
  AccessUrl: AccessUrl;
  AccessUrlInput: AccessUrlInput;
  AllowedOriginInput: AllowedOriginInput;
  ApiKey: ApiKey;
  ApiKeyResponse: ApiKeyResponse;
@@ -1696,6 +1870,7 @@ export type ResolversParentTypes = ResolversObject<{
  Cloud: Cloud;
  CloudResponse: CloudResponse;
  Config: Config;
  Connect: Connect;
  ConnectSignInInput: ConnectSignInInput;
  ConnectUserInfoInput: ConnectUserInfoInput;
  ContainerHostConfig: ContainerHostConfig;
@@ -1706,8 +1881,11 @@ export type ResolversParentTypes = ResolversObject<{
  Disk: Disk;
  DiskPartition: DiskPartition;
  Display: Display;
  Docker: Docker;
  DockerContainer: DockerContainer;
  DockerNetwork: DockerNetwork;
  DynamicRemoteAccessStatus: DynamicRemoteAccessStatus;
  EnableDynamicRemoteAccessInput: EnableDynamicRemoteAccessInput;
  Flash: Flash;
  Float: Scalars['Float']['output'];
  Gpu: Gpu;
@@ -1726,9 +1904,13 @@ export type ResolversParentTypes = ResolversObject<{
  Mount: Mount;
  Mutation: {};
  Network: Network;
  Node: ResolversInterfaceTypes<ResolversParentTypes>['Node'];
  Notification: Notification;
  NotificationCounts: NotificationCounts;
  NotificationData: NotificationData;
  NotificationFilter: NotificationFilter;
  NotificationInput: NotificationInput;
  NotificationOverview: NotificationOverview;
  Notifications: Notifications;
  Os: Os;
  Owner: Owner;
  ParityCheck: ParityCheck;
@@ -1739,6 +1921,7 @@ export type ResolversParentTypes = ResolversObject<{
  Query: {};
  Registration: Registration;
  RelayResponse: RelayResponse;
  RemoteAccess: RemoteAccess;
  Server: Server;
  Service: Service;
  SetupRemoteAccessInput: SetupRemoteAccessInput;
@@ -1746,6 +1929,7 @@ export type ResolversParentTypes = ResolversObject<{
  String: Scalars['String']['output'];
  Subscription: {};
  System: System;
  URL: Scalars['URL']['output'];
  UUID: Scalars['UUID']['output'];
  UnassignedDevice: UnassignedDevice;
  Uptime: Uptime;
@@ -1766,6 +1950,14 @@ export type ResolversParentTypes = ResolversObject<{
  usersInput: usersInput;
}>;

export type AccessUrlResolvers<ContextType = Context, ParentType extends ResolversParentTypes['AccessUrl'] = ResolversParentTypes['AccessUrl']> = ResolversObject<{
  ipv4?: Resolver<Maybe<ResolversTypes['URL']>, ParentType, ContextType>;
  ipv6?: Resolver<Maybe<ResolversTypes['URL']>, ParentType, ContextType>;
  name?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  type?: Resolver<ResolversTypes['URL_TYPE'], ParentType, ContextType>;
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
}>;

export type ApiKeyResolvers<ContextType = Context, ParentType extends ResolversParentTypes['ApiKey'] = ResolversParentTypes['ApiKey']> = ResolversObject<{
  description?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  expiresAt?: Resolver<ResolversTypes['Long'], ParentType, ContextType>;
@@ -1786,6 +1978,7 @@ export type ArrayResolvers<ContextType = Context, ParentType extends ResolversPa
  caches?: Resolver<Array<ResolversTypes['ArrayDisk']>, ParentType, ContextType>;
  capacity?: Resolver<ResolversTypes['ArrayCapacity'], ParentType, ContextType>;
  disks?: Resolver<Array<ResolversTypes['ArrayDisk']>, ParentType, ContextType>;
  id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
  parities?: Resolver<Array<ResolversTypes['ArrayDisk']>, ParentType, ContextType>;
  pendingState?: Resolver<Maybe<ResolversTypes['ArrayPendingState']>, ParentType, ContextType>;
  previousState?: Resolver<Maybe<ResolversTypes['ArrayState']>, ParentType, ContextType>;
@@ -1868,10 +2061,17 @@ export type CloudResponseResolvers<ContextType = Context, ParentType extends Res

export type ConfigResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Config'] = ResolversParentTypes['Config']> = ResolversObject<{
  error?: Resolver<Maybe<ResolversTypes['ConfigErrorState']>, ParentType, ContextType>;
  id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
  valid?: Resolver<Maybe<ResolversTypes['Boolean']>, ParentType, ContextType>;
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
}>;

export type ConnectResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Connect'] = ResolversParentTypes['Connect']> = ResolversObject<{
  dynamicRemoteAccess?: Resolver<ResolversTypes['DynamicRemoteAccessStatus'], ParentType, ContextType>;
  id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
}>;

export type ContainerHostConfigResolvers<ContextType = Context, ParentType extends ResolversParentTypes['ContainerHostConfig'] = ResolversParentTypes['ContainerHostConfig']> = ResolversObject<{
  networkMode?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
@@ -1945,6 +2145,7 @@ export type DisplayResolvers<ContextType = Context, ParentType extends Resolvers
  dashapps?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  date?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  hot?: Resolver<Maybe<ResolversTypes['Int']>, ParentType, ContextType>;
  id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
  locale?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  max?: Resolver<Maybe<ResolversTypes['Int']>, ParentType, ContextType>;
  number?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
@@ -1962,6 +2163,13 @@ export type DisplayResolvers<ContextType = Context, ParentType extends Resolvers
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
}>;

export type DockerResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Docker'] = ResolversParentTypes['Docker']> = ResolversObject<{
  containers?: Resolver<Maybe<Array<ResolversTypes['DockerContainer']>>, ParentType, ContextType>;
  id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
  networks?: Resolver<Maybe<Array<ResolversTypes['DockerNetwork']>>, ParentType, ContextType>;
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
}>;

export type DockerContainerResolvers<ContextType = Context, ParentType extends ResolversParentTypes['DockerContainer'] = ResolversParentTypes['DockerContainer']> = ResolversObject<{
  autoStart?: Resolver<ResolversTypes['Boolean'], ParentType, ContextType>;
  command?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
@@ -2000,6 +2208,13 @@ export type DockerNetworkResolvers<ContextType = Context, ParentType extends Res
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
}>;

export type DynamicRemoteAccessStatusResolvers<ContextType = Context, ParentType extends ResolversParentTypes['DynamicRemoteAccessStatus'] = ResolversParentTypes['DynamicRemoteAccessStatus']> = ResolversObject<{
  enabledType?: Resolver<ResolversTypes['DynamicRemoteAccessType'], ParentType, ContextType>;
  error?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  runningType?: Resolver<ResolversTypes['DynamicRemoteAccessType'], ParentType, ContextType>;
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
}>;

export type FlashResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Flash'] = ResolversParentTypes['Flash']> = ResolversObject<{
  guid?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  product?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
@@ -2024,10 +2239,12 @@ export type InfoResolvers<ContextType = Context, ParentType extends ResolversPar
  cpu?: Resolver<Maybe<ResolversTypes['InfoCpu']>, ParentType, ContextType>;
  devices?: Resolver<Maybe<ResolversTypes['Devices']>, ParentType, ContextType>;
  display?: Resolver<Maybe<ResolversTypes['Display']>, ParentType, ContextType>;
  id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
  machineId?: Resolver<Maybe<ResolversTypes['ID']>, ParentType, ContextType>;
  memory?: Resolver<Maybe<ResolversTypes['InfoMemory']>, ParentType, ContextType>;
  os?: Resolver<Maybe<ResolversTypes['Os']>, ParentType, ContextType>;
  system?: Resolver<Maybe<ResolversTypes['System']>, ParentType, ContextType>;
  time?: Resolver<ResolversTypes['DateTime'], ParentType, ContextType>;
  versions?: Resolver<Maybe<ResolversTypes['Versions']>, ParentType, ContextType>;
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
}>;
@@ -2131,32 +2348,44 @@ export type MutationResolvers<ContextType = Context, ParentType extends Resolver
  addApikey?: Resolver<Maybe<ResolversTypes['ApiKey']>, ParentType, ContextType, RequireFields<MutationaddApikeyArgs, 'name'>>;
  addDiskToArray?: Resolver<Maybe<ResolversTypes['Array']>, ParentType, ContextType, Partial<MutationaddDiskToArrayArgs>>;
  addUser?: Resolver<Maybe<ResolversTypes['User']>, ParentType, ContextType, RequireFields<MutationaddUserArgs, 'input'>>;
  archiveAll?: Resolver<ResolversTypes['NotificationOverview'], ParentType, ContextType, Partial<MutationarchiveAllArgs>>;
  archiveNotification?: Resolver<ResolversTypes['Notification'], ParentType, ContextType, RequireFields<MutationarchiveNotificationArgs, 'id'>>;
  archiveNotifications?: Resolver<ResolversTypes['NotificationOverview'], ParentType, ContextType, Partial<MutationarchiveNotificationsArgs>>;
  cancelParityCheck?: Resolver<Maybe<ResolversTypes['JSON']>, ParentType, ContextType>;
  clearArrayDiskStatistics?: Resolver<Maybe<ResolversTypes['JSON']>, ParentType, ContextType, RequireFields<MutationclearArrayDiskStatisticsArgs, 'id'>>;
  connectSignIn?: Resolver<ResolversTypes['Boolean'], ParentType, ContextType, RequireFields<MutationconnectSignInArgs, 'input'>>;
  connectSignOut?: Resolver<ResolversTypes['Boolean'], ParentType, ContextType>;
  createNotification?: Resolver<ResolversTypes['Notification'], ParentType, ContextType, RequireFields<MutationcreateNotificationArgs, 'input'>>;
  deleteAllNotifications?: Resolver<ResolversTypes['NotificationOverview'], ParentType, ContextType>;
  deleteNotification?: Resolver<ResolversTypes['NotificationOverview'], ParentType, ContextType, RequireFields<MutationdeleteNotificationArgs, 'id' | 'type'>>;
  deleteUser?: Resolver<Maybe<ResolversTypes['User']>, ParentType, ContextType, RequireFields<MutationdeleteUserArgs, 'input'>>;
  enableDynamicRemoteAccess?: Resolver<ResolversTypes['Boolean'], ParentType, ContextType, RequireFields<MutationenableDynamicRemoteAccessArgs, 'input'>>;
  getApiKey?: Resolver<Maybe<ResolversTypes['ApiKey']>, ParentType, ContextType, RequireFields<MutationgetApiKeyArgs, 'name'>>;
  login?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType, RequireFields<MutationloginArgs, 'password' | 'username'>>;
  mountArrayDisk?: Resolver<Maybe<ResolversTypes['Disk']>, ParentType, ContextType, RequireFields<MutationmountArrayDiskArgs, 'id'>>;
  pauseParityCheck?: Resolver<Maybe<ResolversTypes['JSON']>, ParentType, ContextType>;
  reboot?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  recalculateOverview?: Resolver<ResolversTypes['NotificationOverview'], ParentType, ContextType>;
  removeDiskFromArray?: Resolver<Maybe<ResolversTypes['Array']>, ParentType, ContextType, Partial<MutationremoveDiskFromArrayArgs>>;
  resumeParityCheck?: Resolver<Maybe<ResolversTypes['JSON']>, ParentType, ContextType>;
  sendNotification?: Resolver<Maybe<ResolversTypes['Notification']>, ParentType, ContextType, RequireFields<MutationsendNotificationArgs, 'notification'>>;
  setAdditionalAllowedOrigins?: Resolver<Array<ResolversTypes['String']>, ParentType, ContextType, RequireFields<MutationsetAdditionalAllowedOriginsArgs, 'input'>>;
  setupRemoteAccess?: Resolver<ResolversTypes['Boolean'], ParentType, ContextType, RequireFields<MutationsetupRemoteAccessArgs, 'input'>>;
  shutdown?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  startArray?: Resolver<Maybe<ResolversTypes['Array']>, ParentType, ContextType>;
  startParityCheck?: Resolver<Maybe<ResolversTypes['JSON']>, ParentType, ContextType, Partial<MutationstartParityCheckArgs>>;
  stopArray?: Resolver<Maybe<ResolversTypes['Array']>, ParentType, ContextType>;
  unarchiveAll?: Resolver<ResolversTypes['NotificationOverview'], ParentType, ContextType, Partial<MutationunarchiveAllArgs>>;
  unarchiveNotifications?: Resolver<ResolversTypes['NotificationOverview'], ParentType, ContextType, Partial<MutationunarchiveNotificationsArgs>>;
  unmountArrayDisk?: Resolver<Maybe<ResolversTypes['Disk']>, ParentType, ContextType, RequireFields<MutationunmountArrayDiskArgs, 'id'>>;
  unreadNotification?: Resolver<ResolversTypes['Notification'], ParentType, ContextType, RequireFields<MutationunreadNotificationArgs, 'id'>>;
  updateApikey?: Resolver<Maybe<ResolversTypes['ApiKey']>, ParentType, ContextType, RequireFields<MutationupdateApikeyArgs, 'name'>>;
}>;

export type NetworkResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Network'] = ResolversParentTypes['Network']> = ResolversObject<{
  accessUrls?: Resolver<Maybe<Array<ResolversTypes['AccessUrl']>>, ParentType, ContextType>;
  carrierChanges?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  duplex?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
  iface?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  ifaceName?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  internal?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
@@ -2170,8 +2399,14 @@ export type NetworkResolvers<ContextType = Context, ParentType extends Resolvers
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
}>;

export type NodeResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Node'] = ResolversParentTypes['Node']> = ResolversObject<{
  __resolveType: TypeResolveFn<'Array' | 'Config' | 'Connect' | 'Docker' | 'Info' | 'Network' | 'Notification' | 'Notifications' | 'Service' | 'Vars', ParentType, ContextType>;
  id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
}>;

export type NotificationResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Notification'] = ResolversParentTypes['Notification']> = ResolversObject<{
  description?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
  formattedTimestamp?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
  importance?: Resolver<ResolversTypes['Importance'], ParentType, ContextType>;
  link?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
@@ -2182,6 +2417,27 @@ export type NotificationResolvers<ContextType = Context, ParentType extends Reso
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
}>;

export type NotificationCountsResolvers<ContextType = Context, ParentType extends ResolversParentTypes['NotificationCounts'] = ResolversParentTypes['NotificationCounts']> = ResolversObject<{
  alert?: Resolver<ResolversTypes['Int'], ParentType, ContextType>;
  info?: Resolver<ResolversTypes['Int'], ParentType, ContextType>;
  total?: Resolver<ResolversTypes['Int'], ParentType, ContextType>;
  warning?: Resolver<ResolversTypes['Int'], ParentType, ContextType>;
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
}>;

export type NotificationOverviewResolvers<ContextType = Context, ParentType extends ResolversParentTypes['NotificationOverview'] = ResolversParentTypes['NotificationOverview']> = ResolversObject<{
  archive?: Resolver<ResolversTypes['NotificationCounts'], ParentType, ContextType>;
  unread?: Resolver<ResolversTypes['NotificationCounts'], ParentType, ContextType>;
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
}>;

export type NotificationsResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Notifications'] = ResolversParentTypes['Notifications']> = ResolversObject<{
  id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
  list?: Resolver<Array<ResolversTypes['Notification']>, ParentType, ContextType, RequireFields<NotificationslistArgs, 'filter'>>;
  overview?: Resolver<ResolversTypes['NotificationOverview'], ParentType, ContextType>;
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
}>;

export type OsResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Os'] = ResolversParentTypes['Os']> = ResolversObject<{
  arch?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  build?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
@@ -2306,22 +2562,28 @@ export type QueryResolvers<ContextType = Context, ParentType extends ResolversPa
  array?: Resolver<ResolversTypes['Array'], ParentType, ContextType>;
  cloud?: Resolver<Maybe<ResolversTypes['Cloud']>, ParentType, ContextType>;
  config?: Resolver<ResolversTypes['Config'], ParentType, ContextType>;
  connect?: Resolver<ResolversTypes['Connect'], ParentType, ContextType>;
  disk?: Resolver<Maybe<ResolversTypes['Disk']>, ParentType, ContextType, RequireFields<QuerydiskArgs, 'id'>>;
  disks?: Resolver<Array<Maybe<ResolversTypes['Disk']>>, ParentType, ContextType>;
  display?: Resolver<Maybe<ResolversTypes['Display']>, ParentType, ContextType>;
  docker?: Resolver<ResolversTypes['Docker'], ParentType, ContextType>;
  dockerContainers?: Resolver<Array<ResolversTypes['DockerContainer']>, ParentType, ContextType, Partial<QuerydockerContainersArgs>>;
  dockerNetwork?: Resolver<ResolversTypes['DockerNetwork'], ParentType, ContextType, RequireFields<QuerydockerNetworkArgs, 'id'>>;
  dockerNetworks?: Resolver<Array<Maybe<ResolversTypes['DockerNetwork']>>, ParentType, ContextType, Partial<QuerydockerNetworksArgs>>;
  extraAllowedOrigins?: Resolver<Array<ResolversTypes['String']>, ParentType, ContextType>;
  flash?: Resolver<Maybe<ResolversTypes['Flash']>, ParentType, ContextType>;
  info?: Resolver<Maybe<ResolversTypes['Info']>, ParentType, ContextType>;
  me?: Resolver<Maybe<ResolversTypes['Me']>, ParentType, ContextType>;
  notifications?: Resolver<Array<ResolversTypes['Notification']>, ParentType, ContextType, RequireFields<QuerynotificationsArgs, 'filter'>>;
  network?: Resolver<Maybe<ResolversTypes['Network']>, ParentType, ContextType>;
  notifications?: Resolver<ResolversTypes['Notifications'], ParentType, ContextType>;
  online?: Resolver<Maybe<ResolversTypes['Boolean']>, ParentType, ContextType>;
  owner?: Resolver<Maybe<ResolversTypes['Owner']>, ParentType, ContextType>;
  parityHistory?: Resolver<Maybe<Array<Maybe<ResolversTypes['ParityCheck']>>>, ParentType, ContextType>;
  registration?: Resolver<Maybe<ResolversTypes['Registration']>, ParentType, ContextType>;
  remoteAccess?: Resolver<ResolversTypes['RemoteAccess'], ParentType, ContextType>;
  server?: Resolver<Maybe<ResolversTypes['Server']>, ParentType, ContextType>;
  servers?: Resolver<Array<ResolversTypes['Server']>, ParentType, ContextType>;
  services?: Resolver<Array<ResolversTypes['Service']>, ParentType, ContextType>;
  shares?: Resolver<Maybe<Array<Maybe<ResolversTypes['Share']>>>, ParentType, ContextType>;
  unassignedDevices?: Resolver<Maybe<Array<Maybe<ResolversTypes['UnassignedDevice']>>>, ParentType, ContextType>;
  user?: Resolver<Maybe<ResolversTypes['User']>, ParentType, ContextType, RequireFields<QueryuserArgs, 'id'>>;
@@ -2347,6 +2609,13 @@ export type RelayResponseResolvers<ContextType = Context, ParentType extends Res
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
}>;

export type RemoteAccessResolvers<ContextType = Context, ParentType extends ResolversParentTypes['RemoteAccess'] = ResolversParentTypes['RemoteAccess']> = ResolversObject<{
  accessType?: Resolver<ResolversTypes['WAN_ACCESS_TYPE'], ParentType, ContextType>;
  forwardType?: Resolver<Maybe<ResolversTypes['WAN_FORWARD_TYPE']>, ParentType, ContextType>;
  port?: Resolver<Maybe<ResolversTypes['Port']>, ParentType, ContextType>;
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
}>;

export type ServerResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Server'] = ResolversParentTypes['Server']> = ResolversObject<{
  apikey?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
  guid?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
@@ -2361,6 +2630,7 @@ export type ServerResolvers<ContextType = Context, ParentType extends ResolversP
}>;

export type ServiceResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Service'] = ResolversParentTypes['Service']> = ResolversObject<{
  id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
  name?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  online?: Resolver<Maybe<ResolversTypes['Boolean']>, ParentType, ContextType>;
  uptime?: Resolver<Maybe<ResolversTypes['Uptime']>, ParentType, ContextType>;
@@ -2400,6 +2670,7 @@ export type SubscriptionResolvers<ContextType = Context, ParentType extends Reso
  info?: SubscriptionResolver<ResolversTypes['Info'], "info", ParentType, ContextType>;
  me?: SubscriptionResolver<Maybe<ResolversTypes['Me']>, "me", ParentType, ContextType>;
  notificationAdded?: SubscriptionResolver<ResolversTypes['Notification'], "notificationAdded", ParentType, ContextType>;
  notificationsOverview?: SubscriptionResolver<ResolversTypes['NotificationOverview'], "notificationsOverview", ParentType, ContextType>;
  online?: SubscriptionResolver<ResolversTypes['Boolean'], "online", ParentType, ContextType>;
  owner?: SubscriptionResolver<ResolversTypes['Owner'], "owner", ParentType, ContextType>;
  parityHistory?: SubscriptionResolver<ResolversTypes['ParityCheck'], "parityHistory", ParentType, ContextType>;
@@ -2426,6 +2697,10 @@ export type SystemResolvers<ContextType = Context, ParentType extends ResolversP
  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
}>;

export interface URLScalarConfig extends GraphQLScalarTypeConfig<ResolversTypes['URL'], any> {
  name: 'URL';
}

export interface UUIDScalarConfig extends GraphQLScalarTypeConfig<ResolversTypes['UUID'], any> {
  name: 'UUID';
}
@@ -2545,6 +2820,7 @@ export type VarsResolvers<ContextType = Context, ParentType extends ResolversPar
  fuseRememberDefault?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  fuseRememberStatus?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  hideDotFiles?: Resolver<Maybe<ResolversTypes['Boolean']>, ParentType, ContextType>;
  id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
  joinStatus?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
  localMaster?: Resolver<Maybe<ResolversTypes['Boolean']>, ParentType, ContextType>;
  localTld?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
@@ -2708,6 +2984,7 @@ export type WelcomeResolvers<ContextType = Context, ParentType extends Resolvers
}>;

export type Resolvers<ContextType = Context> = ResolversObject<{
  AccessUrl?: AccessUrlResolvers<ContextType>;
  ApiKey?: ApiKeyResolvers<ContextType>;
  ApiKeyResponse?: ApiKeyResponseResolvers<ContextType>;
  Array?: ArrayResolvers<ContextType>;
@@ -2719,6 +2996,7 @@ export type Resolvers<ContextType = Context> = ResolversObject<{
  Cloud?: CloudResolvers<ContextType>;
  CloudResponse?: CloudResponseResolvers<ContextType>;
  Config?: ConfigResolvers<ContextType>;
  Connect?: ConnectResolvers<ContextType>;
  ContainerHostConfig?: ContainerHostConfigResolvers<ContextType>;
  ContainerMount?: ContainerMountResolvers<ContextType>;
  ContainerPort?: ContainerPortResolvers<ContextType>;
@@ -2727,8 +3005,10 @@ export type Resolvers<ContextType = Context> = ResolversObject<{
  Disk?: DiskResolvers<ContextType>;
  DiskPartition?: DiskPartitionResolvers<ContextType>;
  Display?: DisplayResolvers<ContextType>;
  Docker?: DockerResolvers<ContextType>;
  DockerContainer?: DockerContainerResolvers<ContextType>;
  DockerNetwork?: DockerNetworkResolvers<ContextType>;
  DynamicRemoteAccessStatus?: DynamicRemoteAccessStatusResolvers<ContextType>;
  Flash?: FlashResolvers<ContextType>;
  Gpu?: GpuResolvers<ContextType>;
  Info?: InfoResolvers<ContextType>;
@@ -2744,7 +3024,11 @@ export type Resolvers<ContextType = Context> = ResolversObject<{
  Mount?: MountResolvers<ContextType>;
  Mutation?: MutationResolvers<ContextType>;
  Network?: NetworkResolvers<ContextType>;
  Node?: NodeResolvers<ContextType>;
  Notification?: NotificationResolvers<ContextType>;
  NotificationCounts?: NotificationCountsResolvers<ContextType>;
  NotificationOverview?: NotificationOverviewResolvers<ContextType>;
  Notifications?: NotificationsResolvers<ContextType>;
  Os?: OsResolvers<ContextType>;
  Owner?: OwnerResolvers<ContextType>;
  ParityCheck?: ParityCheckResolvers<ContextType>;
@@ -2755,11 +3039,13 @@ export type Resolvers<ContextType = Context> = ResolversObject<{
  Query?: QueryResolvers<ContextType>;
  Registration?: RegistrationResolvers<ContextType>;
  RelayResponse?: RelayResponseResolvers<ContextType>;
  RemoteAccess?: RemoteAccessResolvers<ContextType>;
  Server?: ServerResolvers<ContextType>;
  Service?: ServiceResolvers<ContextType>;
  Share?: ShareResolvers<ContextType>;
  Subscription?: SubscriptionResolvers<ContextType>;
  System?: SystemResolvers<ContextType>;
  URL?: GraphQLScalarType;
  UUID?: GraphQLScalarType;
  UnassignedDevice?: UnassignedDeviceResolvers<ContextType>;
  Uptime?: UptimeResolvers<ContextType>;

@@ -20,25 +20,45 @@ export function useFragment<TType>(
  _documentNode: DocumentTypeDecoration<TType, any>,
  fragmentType: FragmentType<DocumentTypeDecoration<TType, any>>
): TType;
// return nullable if `fragmentType` is undefined
export function useFragment<TType>(
  _documentNode: DocumentTypeDecoration<TType, any>,
  fragmentType: FragmentType<DocumentTypeDecoration<TType, any>> | undefined
): TType | undefined;
// return nullable if `fragmentType` is nullable
export function useFragment<TType>(
  _documentNode: DocumentTypeDecoration<TType, any>,
  fragmentType: FragmentType<DocumentTypeDecoration<TType, any>> | null
): TType | null;
// return nullable if `fragmentType` is nullable or undefined
export function useFragment<TType>(
  _documentNode: DocumentTypeDecoration<TType, any>,
  fragmentType: FragmentType<DocumentTypeDecoration<TType, any>> | null | undefined
): TType | null | undefined;
// return array of non-nullable if `fragmentType` is array of non-nullable
export function useFragment<TType>(
  _documentNode: DocumentTypeDecoration<TType, any>,
  fragmentType: Array<FragmentType<DocumentTypeDecoration<TType, any>>>
): Array<TType>;
// return array of nullable if `fragmentType` is array of nullable
export function useFragment<TType>(
  _documentNode: DocumentTypeDecoration<TType, any>,
  fragmentType: Array<FragmentType<DocumentTypeDecoration<TType, any>>> | null | undefined
): Array<TType> | null | undefined;
// return readonly array of non-nullable if `fragmentType` is array of non-nullable
export function useFragment<TType>(
  _documentNode: DocumentTypeDecoration<TType, any>,
  fragmentType: ReadonlyArray<FragmentType<DocumentTypeDecoration<TType, any>>>
): ReadonlyArray<TType>;
// return array of nullable if `fragmentType` is array of nullable
// return readonly array of nullable if `fragmentType` is array of nullable
export function useFragment<TType>(
  _documentNode: DocumentTypeDecoration<TType, any>,
  fragmentType: ReadonlyArray<FragmentType<DocumentTypeDecoration<TType, any>>> | null | undefined
): ReadonlyArray<TType> | null | undefined;
export function useFragment<TType>(
  _documentNode: DocumentTypeDecoration<TType, any>,
  fragmentType: FragmentType<DocumentTypeDecoration<TType, any>> | ReadonlyArray<FragmentType<DocumentTypeDecoration<TType, any>>> | null | undefined
): TType | ReadonlyArray<TType> | null | undefined {
  fragmentType: FragmentType<DocumentTypeDecoration<TType, any>> | Array<FragmentType<DocumentTypeDecoration<TType, any>>> | ReadonlyArray<FragmentType<DocumentTypeDecoration<TType, any>>> | null | undefined
): TType | Array<TType> | ReadonlyArray<TType> | null | undefined {
  return fragmentType as any;
}
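
The overloads above only narrow the static type; at runtime useFragment is an identity cast. The following is a minimal usage sketch, not part of the diff: the import paths and the wrapper function are assumptions, while RemoteGraphQLEventFragmentFragmentDoc is the fragment document referenced from ./graphql.js in the gql.ts map below.

// Illustrative sketch only: unmasking a fragment reference via the overloads above.
import { useFragment, type FragmentType } from './fragment-masking.js'; // path assumed
import { RemoteGraphQLEventFragmentFragmentDoc } from './graphql.js';   // generated elsewhere

function readRemoteGraphQLEvent(
  masked: FragmentType<typeof RemoteGraphQLEventFragmentFragmentDoc>
) {
  // Narrows the masked reference back to the fragment's selection set:
  // { remoteGraphQLEventData: { type, body, sha256 } }.
  const event = useFragment(RemoteGraphQLEventFragmentFragmentDoc, masked);
  return event.remoteGraphQLEventData;
}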
api/src/graphql/generated/client/gql.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
/* eslint-disable */
import * as types from './graphql.js';
import type { TypedDocumentNode as DocumentNode } from '@graphql-typed-document-node/core';

/**
 * Map of all GraphQL operations in the project.
 *
 * This map has several performance disadvantages:
 * 1. It is not tree-shakeable, so it will include all operations in the project.
 * 2. It is not minifiable, so the string of a GraphQL query will be multiple times inside the bundle.
 * 3. It does not support dead code elimination, so it will add unused operations.
 *
 * Therefore it is highly recommended to use the babel or swc plugin for production.
 * Learn more about it here: https://the-guild.dev/graphql/codegen/plugins/presets/preset-client#reducing-bundle-size
 */
const documents = {
    "\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n": types.sendRemoteGraphQLResponseDocument,
    "\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n": types.RemoteGraphQLEventFragmentFragmentDoc,
    "\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n": types.eventsDocument,
};

/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 *
 *
 * @example
 * ```ts
 * const query = graphql(`query GetUser($id: ID!) { user(id: $id) { name } }`);
 * ```
 *
 * The query argument is unknown!
 * Please regenerate the types.
 */
export function graphql(source: string): unknown;

/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n"): (typeof documents)["\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n"): (typeof documents)["\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n"): (typeof documents)["\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n"];

export function graphql(source: string) {
  return (documents as any)[source] ?? {};
}

export type DocumentType<TDocumentNode extends DocumentNode<any, any>> = TDocumentNode extends DocumentNode< infer TType, any> ? TType : never;
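
A short usage sketch, not part of the generated file: a call whose source string matches one of the keys in documents character-for-character resolves to the typed overload and returns the pre-built document, while any other string falls through to the source: string overload and comes back as unknown. The import path and variable names here are assumptions.

// Illustrative sketch only; the relative import path is assumed.
import { graphql } from './gql.js';

// Matches the first key in `documents`, so the typed overload applies and the
// runtime lookup returns types.sendRemoteGraphQLResponseDocument.
const sendRemoteGraphQLResponseDoc = graphql(
  "\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n"
);

// Not in the map: this hits the `source: string` overload, is typed as `unknown`,
// and yields an empty object at runtime until the types are regenerated.
const adHocDoc = graphql('query Online { online }');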
Some files were not shown because too many files have changed in this diff.